From e96fb54fef91a08df3e6b3d3cf690fcf15c0dd94 Mon Sep 17 00:00:00 2001
From: Kelly Hofmann <55991524+k3llymariee@users.noreply.github.com>
Date: Wed, 1 May 2024 09:49:55 -0700
Subject: [PATCH] feat: generate teams operation data from openapi spec (#226)
* add temp openapi spec just for teams
* vendor openapi loader, read from file
* generate template data from openapi spec (teams only)
* get schema from component name not tag
* add tests
* fix imports
* fix tests, remove pointers
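
A rough sketch of how the generated data can be consumed (illustrative only, not
part of this change; it assumes nothing beyond the exported GetTemplateData API
and the ld-teams-openapi.json file added here):

    package main

    import (
        "fmt"
        "log"

        "ldcli/cmd/resources"
    )

    func main() {
        // Load the teams-only spec and walk the generated resource/operation data.
        data, err := resources.GetTemplateData("ld-teams-openapi.json")
        if err != nil {
            log.Fatal(err)
        }
        for name, r := range data.Resources {
            for opID, op := range r.Operations {
                fmt.Printf("%s: %s %s -> %s (%s)\n", name, op.HTTPMethod, op.Path, op.Use, opID)
            }
        }
    }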
---
cmd/resources/gen_resources.go | 139 +
cmd/resources/gen_resources_test.go | 28 +
cmd/resources/resource_cmds.go | 1 +
cmd/resources/resources.go | 19 -
.../test_data/expected_template_data.json | 109 +
cmd/resources/test_data/test-openapi.json | 1323 ++++++++++
go.mod | 8 +
go.sum | 23 +-
ld-teams-openapi.json | 1812 +++++++++++++
tools.go | 1 +
vendor/github.com/getkin/kin-openapi/LICENSE | 21 +
.../getkin/kin-openapi/openapi3/callback.go | 54 +
.../getkin/kin-openapi/openapi3/components.go | 361 +++
.../getkin/kin-openapi/openapi3/contact.go | 59 +
.../getkin/kin-openapi/openapi3/content.go | 124 +
.../kin-openapi/openapi3/discriminator.go | 52 +
.../getkin/kin-openapi/openapi3/doc.go | 4 +
.../getkin/kin-openapi/openapi3/encoding.go | 139 +
.../getkin/kin-openapi/openapi3/errors.go | 59 +
.../getkin/kin-openapi/openapi3/example.go | 76 +
.../openapi3/example_validation.go | 16 +
.../getkin/kin-openapi/openapi3/extension.go | 32 +
.../kin-openapi/openapi3/external_docs.go | 64 +
.../getkin/kin-openapi/openapi3/header.go | 96 +
.../getkin/kin-openapi/openapi3/helpers.go | 41 +
.../getkin/kin-openapi/openapi3/info.go | 91 +
.../kin-openapi/openapi3/internalize_refs.go | 443 ++++
.../getkin/kin-openapi/openapi3/license.go | 57 +
.../getkin/kin-openapi/openapi3/link.go | 85 +
.../getkin/kin-openapi/openapi3/loader.go | 1118 ++++++++
.../kin-openapi/openapi3/loader_uri_reader.go | 116 +
.../getkin/kin-openapi/openapi3/maplike.go | 366 +++
.../getkin/kin-openapi/openapi3/marsh.go | 26 +
.../getkin/kin-openapi/openapi3/media_type.go | 170 ++
.../getkin/kin-openapi/openapi3/openapi3.go | 191 ++
.../getkin/kin-openapi/openapi3/operation.go | 213 ++
.../getkin/kin-openapi/openapi3/parameter.go | 407 +++
.../getkin/kin-openapi/openapi3/path_item.go | 239 ++
.../getkin/kin-openapi/openapi3/paths.go | 283 +++
.../getkin/kin-openapi/openapi3/ref.go | 7 +
.../getkin/kin-openapi/openapi3/refs.go | 713 ++++++
.../kin-openapi/openapi3/request_body.go | 129 +
.../getkin/kin-openapi/openapi3/response.go | 233 ++
.../getkin/kin-openapi/openapi3/schema.go | 2237 +++++++++++++++++
.../kin-openapi/openapi3/schema_formats.go | 106 +
.../kin-openapi/openapi3/schema_pattern.go | 29 +
.../openapi3/schema_validation_settings.go | 79 +
.../openapi3/security_requirements.go | 51 +
.../kin-openapi/openapi3/security_scheme.go | 402 +++
.../openapi3/serialization_method.go | 17 +
.../getkin/kin-openapi/openapi3/server.go | 284 +++
.../getkin/kin-openapi/openapi3/tag.go | 90 +
.../openapi3/testdata/circularRef/base.yml | 16 +
.../openapi3/testdata/circularRef/other.yml | 10 +
.../testdata/recursiveRef/components/Bar.yml | 2 +
.../testdata/recursiveRef/components/Cat.yml | 4 +
.../testdata/recursiveRef/components/Foo.yml | 4 +
.../recursiveRef/components/Foo/Foo2.yml | 4 +
.../recursiveRef/components/models/error.yaml | 2 +
.../testdata/recursiveRef/issue615.yml | 60 +
.../testdata/recursiveRef/openapi.yml | 33 +
.../recursiveRef/openapi.yml.internalized.yml | 110 +
.../recursiveRef/parameters/number.yml | 4 +
.../testdata/recursiveRef/paths/foo.yml | 15 +
.../openapi3/validation_options.go | 112 +
.../getkin/kin-openapi/openapi3/visited.go | 41 +
.../getkin/kin-openapi/openapi3/xml.go | 69 +
.../go-openapi/jsonpointer/.editorconfig | 26 +
.../go-openapi/jsonpointer/.gitignore | 1 +
.../go-openapi/jsonpointer/.golangci.yml | 61 +
.../go-openapi/jsonpointer/CODE_OF_CONDUCT.md | 74 +
.../github.com/go-openapi/jsonpointer/LICENSE | 202 ++
.../go-openapi/jsonpointer/README.md | 19 +
.../go-openapi/jsonpointer/pointer.go | 531 ++++
.../github.com/go-openapi/swag/.editorconfig | 26 +
.../github.com/go-openapi/swag/.gitattributes | 2 +
vendor/github.com/go-openapi/swag/.gitignore | 5 +
.../github.com/go-openapi/swag/.golangci.yml | 60 +
.../github.com/go-openapi/swag/BENCHMARK.md | 52 +
.../go-openapi/swag/CODE_OF_CONDUCT.md | 74 +
vendor/github.com/go-openapi/swag/LICENSE | 202 ++
vendor/github.com/go-openapi/swag/README.md | 23 +
vendor/github.com/go-openapi/swag/convert.go | 208 ++
.../go-openapi/swag/convert_types.go | 730 ++++++
vendor/github.com/go-openapi/swag/doc.go | 31 +
vendor/github.com/go-openapi/swag/file.go | 33 +
.../go-openapi/swag/initialism_index.go | 202 ++
vendor/github.com/go-openapi/swag/json.go | 312 +++
vendor/github.com/go-openapi/swag/loading.go | 176 ++
.../github.com/go-openapi/swag/name_lexem.go | 93 +
vendor/github.com/go-openapi/swag/net.go | 38 +
vendor/github.com/go-openapi/swag/path.go | 59 +
vendor/github.com/go-openapi/swag/split.go | 508 ++++
.../go-openapi/swag/string_bytes.go | 22 +
vendor/github.com/go-openapi/swag/util.go | 364 +++
vendor/github.com/go-openapi/swag/yaml.go | 480 ++++
vendor/github.com/invopop/yaml/.gitignore | 20 +
vendor/github.com/invopop/yaml/.golangci.toml | 15 +
vendor/github.com/invopop/yaml/LICENSE | 50 +
vendor/github.com/invopop/yaml/README.md | 128 +
vendor/github.com/invopop/yaml/fields.go | 498 ++++
vendor/github.com/invopop/yaml/yaml.go | 314 +++
vendor/github.com/josharian/intern/README.md | 5 +
vendor/github.com/josharian/intern/intern.go | 44 +
vendor/github.com/josharian/intern/license.md | 21 +
vendor/github.com/mailru/easyjson/LICENSE | 7 +
.../github.com/mailru/easyjson/buffer/pool.go | 278 ++
.../mailru/easyjson/jlexer/bytestostr.go | 24 +
.../easyjson/jlexer/bytestostr_nounsafe.go | 13 +
.../mailru/easyjson/jlexer/error.go | 15 +
.../mailru/easyjson/jlexer/lexer.go | 1244 +++++++++
.../mailru/easyjson/jwriter/writer.go | 405 +++
vendor/github.com/mohae/deepcopy/.gitignore | 26 +
vendor/github.com/mohae/deepcopy/.travis.yml | 11 +
vendor/github.com/mohae/deepcopy/LICENSE | 21 +
vendor/github.com/mohae/deepcopy/README.md | 8 +
vendor/github.com/mohae/deepcopy/deepcopy.go | 125 +
.../perimeterx/marshmallow/.gitignore | 4 +
.../perimeterx/marshmallow/CHANGELOG.md | 49 +
.../perimeterx/marshmallow/CODE_OF_CONDUCT.md | 133 +
.../perimeterx/marshmallow/CONTRIBUTING.md | 47 +
.../github.com/perimeterx/marshmallow/LICENSE | 21 +
.../perimeterx/marshmallow/README.md | 205 ++
.../perimeterx/marshmallow/cache.go | 63 +
.../github.com/perimeterx/marshmallow/doc.go | 10 +
.../perimeterx/marshmallow/errors.go | 101 +
.../perimeterx/marshmallow/options.go | 96 +
.../perimeterx/marshmallow/reflection.go | 197 ++
.../perimeterx/marshmallow/unmarshal.go | 383 +++
.../marshmallow/unmarshal_from_json_map.go | 295 +++
vendor/modules.txt | 26 +
131 files changed, 22824 insertions(+), 21 deletions(-)
create mode 100644 cmd/resources/gen_resources.go
create mode 100644 cmd/resources/gen_resources_test.go
create mode 100644 cmd/resources/test_data/expected_template_data.json
create mode 100644 cmd/resources/test_data/test-openapi.json
create mode 100644 ld-teams-openapi.json
create mode 100644 tools.go
create mode 100644 vendor/github.com/getkin/kin-openapi/LICENSE
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/callback.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/components.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/contact.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/content.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/discriminator.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/doc.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/encoding.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/errors.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/example.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/example_validation.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/extension.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/external_docs.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/header.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/helpers.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/info.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/internalize_refs.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/license.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/link.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/loader.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/loader_uri_reader.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/maplike.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/marsh.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/media_type.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/openapi3.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/operation.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/parameter.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/path_item.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/paths.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/ref.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/refs.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/request_body.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/response.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/schema.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/schema_formats.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/schema_pattern.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/schema_validation_settings.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/security_requirements.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/security_scheme.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/serialization_method.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/server.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/tag.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/base.yml
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/other.yml
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Bar.yml
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Cat.yml
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo.yml
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo/Foo2.yml
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/models/error.yaml
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/issue615.yml
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml.internalized.yml
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/parameters/number.yml
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/paths/foo.yml
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/validation_options.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/visited.go
create mode 100644 vendor/github.com/getkin/kin-openapi/openapi3/xml.go
create mode 100644 vendor/github.com/go-openapi/jsonpointer/.editorconfig
create mode 100644 vendor/github.com/go-openapi/jsonpointer/.gitignore
create mode 100644 vendor/github.com/go-openapi/jsonpointer/.golangci.yml
create mode 100644 vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md
create mode 100644 vendor/github.com/go-openapi/jsonpointer/LICENSE
create mode 100644 vendor/github.com/go-openapi/jsonpointer/README.md
create mode 100644 vendor/github.com/go-openapi/jsonpointer/pointer.go
create mode 100644 vendor/github.com/go-openapi/swag/.editorconfig
create mode 100644 vendor/github.com/go-openapi/swag/.gitattributes
create mode 100644 vendor/github.com/go-openapi/swag/.gitignore
create mode 100644 vendor/github.com/go-openapi/swag/.golangci.yml
create mode 100644 vendor/github.com/go-openapi/swag/BENCHMARK.md
create mode 100644 vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md
create mode 100644 vendor/github.com/go-openapi/swag/LICENSE
create mode 100644 vendor/github.com/go-openapi/swag/README.md
create mode 100644 vendor/github.com/go-openapi/swag/convert.go
create mode 100644 vendor/github.com/go-openapi/swag/convert_types.go
create mode 100644 vendor/github.com/go-openapi/swag/doc.go
create mode 100644 vendor/github.com/go-openapi/swag/file.go
create mode 100644 vendor/github.com/go-openapi/swag/initialism_index.go
create mode 100644 vendor/github.com/go-openapi/swag/json.go
create mode 100644 vendor/github.com/go-openapi/swag/loading.go
create mode 100644 vendor/github.com/go-openapi/swag/name_lexem.go
create mode 100644 vendor/github.com/go-openapi/swag/net.go
create mode 100644 vendor/github.com/go-openapi/swag/path.go
create mode 100644 vendor/github.com/go-openapi/swag/split.go
create mode 100644 vendor/github.com/go-openapi/swag/string_bytes.go
create mode 100644 vendor/github.com/go-openapi/swag/util.go
create mode 100644 vendor/github.com/go-openapi/swag/yaml.go
create mode 100644 vendor/github.com/invopop/yaml/.gitignore
create mode 100644 vendor/github.com/invopop/yaml/.golangci.toml
create mode 100644 vendor/github.com/invopop/yaml/LICENSE
create mode 100644 vendor/github.com/invopop/yaml/README.md
create mode 100644 vendor/github.com/invopop/yaml/fields.go
create mode 100644 vendor/github.com/invopop/yaml/yaml.go
create mode 100644 vendor/github.com/josharian/intern/README.md
create mode 100644 vendor/github.com/josharian/intern/intern.go
create mode 100644 vendor/github.com/josharian/intern/license.md
create mode 100644 vendor/github.com/mailru/easyjson/LICENSE
create mode 100644 vendor/github.com/mailru/easyjson/buffer/pool.go
create mode 100644 vendor/github.com/mailru/easyjson/jlexer/bytestostr.go
create mode 100644 vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go
create mode 100644 vendor/github.com/mailru/easyjson/jlexer/error.go
create mode 100644 vendor/github.com/mailru/easyjson/jlexer/lexer.go
create mode 100644 vendor/github.com/mailru/easyjson/jwriter/writer.go
create mode 100644 vendor/github.com/mohae/deepcopy/.gitignore
create mode 100644 vendor/github.com/mohae/deepcopy/.travis.yml
create mode 100644 vendor/github.com/mohae/deepcopy/LICENSE
create mode 100644 vendor/github.com/mohae/deepcopy/README.md
create mode 100644 vendor/github.com/mohae/deepcopy/deepcopy.go
create mode 100644 vendor/github.com/perimeterx/marshmallow/.gitignore
create mode 100644 vendor/github.com/perimeterx/marshmallow/CHANGELOG.md
create mode 100644 vendor/github.com/perimeterx/marshmallow/CODE_OF_CONDUCT.md
create mode 100644 vendor/github.com/perimeterx/marshmallow/CONTRIBUTING.md
create mode 100644 vendor/github.com/perimeterx/marshmallow/LICENSE
create mode 100644 vendor/github.com/perimeterx/marshmallow/README.md
create mode 100644 vendor/github.com/perimeterx/marshmallow/cache.go
create mode 100644 vendor/github.com/perimeterx/marshmallow/doc.go
create mode 100644 vendor/github.com/perimeterx/marshmallow/errors.go
create mode 100644 vendor/github.com/perimeterx/marshmallow/options.go
create mode 100644 vendor/github.com/perimeterx/marshmallow/reflection.go
create mode 100644 vendor/github.com/perimeterx/marshmallow/unmarshal.go
create mode 100644 vendor/github.com/perimeterx/marshmallow/unmarshal_from_json_map.go
diff --git a/cmd/resources/gen_resources.go b/cmd/resources/gen_resources.go
new file mode 100644
index 00000000..faf4071f
--- /dev/null
+++ b/cmd/resources/gen_resources.go
@@ -0,0 +1,139 @@
+package resources
+
+import (
+ "log"
+ "os"
+ "strconv"
+ "strings"
+
+ "github.com/getkin/kin-openapi/openapi3"
+)
+
+type TemplateData struct {
+ Resources map[string]ResourceData
+}
+
+type ResourceData struct {
+ Name string
+ Description string
+ Operations map[string]OperationData
+}
+
+type OperationData struct {
+ Short string
+ Long string
+ Use string
+ Params []Param
+ HTTPMethod string
+ RequiresBody bool
+ Path string
+ SupportsSemanticPatch bool
+}
+
+type Param struct {
+ Name string
+ In string
+ Description string
+ Type string
+ Required bool
+}
+
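+// GetTemplateData loads the OpenAPI spec at fileName and builds TemplateData:
+// one ResourceData per tag, and one OperationData per path operation, keyed by
+// operation ID under the resource matching the operation's tag.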
+func GetTemplateData(fileName string) (TemplateData, error) {
+ rawFile, err := os.ReadFile(fileName)
+ if err != nil {
+ return TemplateData{}, err
+ }
+
+ loader := openapi3.NewLoader()
+ spec, err := loader.LoadFromData(rawFile)
+ if err != nil {
+ return TemplateData{}, err
+ }
+
+ resources := make(map[string]ResourceData)
+ for _, r := range spec.Tags {
+ resources[r.Name] = ResourceData{
+ Name: r.Name,
+ Description: r.Description,
+ Operations: make(map[string]OperationData, 0),
+ }
+ }
+
+ for path, pathItem := range spec.Paths.Map() {
+ for method, op := range pathItem.Operations() {
+ tag := op.Tags[0] // TODO: confirm each op only has one tag
+ resource, ok := resources[tag]
+ if !ok {
+ log.Printf("Matching resource not found for %s operation's tag: %s", op.OperationID, tag)
+ continue
+ }
+
+ use := getCmdUse(method, op, spec)
+
+ operation := OperationData{
+ Short: op.Summary,
+ Long: op.Description,
+ Use: use,
+ Params: make([]Param, 0),
+ HTTPMethod: method,
+ RequiresBody: method == "PUT" || method == "POST" || method == "PATCH",
+ Path: path,
+ }
+
+ for _, p := range op.Parameters {
+ if p.Value != nil {
+ // TODO: confirm if we only have one type per param b/c somehow this is a slice
+ types := *p.Value.Schema.Value.Type
+ param := Param{
+ Name: p.Value.Name,
+ In: p.Value.In,
+ Description: p.Value.Description,
+ Type: types[0],
+ Required: p.Value.Required,
+ }
+ operation.Params = append(operation.Params, param)
+ }
+ }
+
+ resource.Operations[op.OperationID] = operation
+ }
+ }
+
+ return TemplateData{Resources: resources}, nil
+}
+
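+// getCmdUse maps an HTTP method to a CLI verb (get, create, replace, delete,
+// update), switching to "list" when the operation's successful (<300) response
+// schema has an "items" property.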
+func getCmdUse(method string, op *openapi3.Operation, spec *openapi3.T) string {
+ methodMap := map[string]string{
+ "GET": "get",
+ "POST": "create",
+ "PUT": "replace", // TODO: confirm this
+ "DELETE": "delete",
+ "PATCH": "update",
+ }
+
+ use := methodMap[method]
+
+ var schema *openapi3.SchemaRef
+ for respType, respInfo := range op.Responses.Map() {
+ respCode, _ := strconv.Atoi(respType)
+ if respCode < 300 {
+ for _, s := range respInfo.Value.Content {
+ schemaName := strings.TrimPrefix(s.Schema.Ref, "#/components/schemas/")
+ schema = spec.Components.Schemas[schemaName]
+ }
+ }
+ }
+
+ if schema == nil {
+ // probably won't need to keep this logging in but leaving it for debugging purposes
+ log.Printf("No response type defined for %s", op.OperationID)
+ } else {
+ for propName := range schema.Value.Properties {
+ if propName == "items" {
+ use = "list"
+ break
+ }
+ }
+ }
+ return use
+}
diff --git a/cmd/resources/gen_resources_test.go b/cmd/resources/gen_resources_test.go
new file mode 100644
index 00000000..95d15541
--- /dev/null
+++ b/cmd/resources/gen_resources_test.go
@@ -0,0 +1,28 @@
+package resources_test
+
+import (
+ "encoding/json"
+ "os"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+
+ "ldcli/cmd/resources"
+)
+
+func TestGetTemplateData(t *testing.T) {
+ actual, err := resources.GetTemplateData("test_data/test-openapi.json")
+ assert.NoError(t, err)
+
+ expectedFromFile, err := os.ReadFile("test_data/expected_template_data.json")
+ require.NoError(t, err)
+
+ var expected resources.TemplateData
+ err = json.Unmarshal(expectedFromFile, &expected)
+ require.NoError(t, err)
+
+ t.Run("succeeds with single get resource", func(t *testing.T) {
+ assert.Equal(t, expected, actual)
+ })
+}
diff --git a/cmd/resources/resource_cmds.go b/cmd/resources/resource_cmds.go
index 29e6a2f6..f253287f 100644
--- a/cmd/resources/resource_cmds.go
+++ b/cmd/resources/resource_cmds.go
@@ -24,6 +24,7 @@ func AddAllResourceCmds(rootCmd *cobra.Command, client resources.Client, analyti
Short: "Create team",
Long: "Create a team. To learn more, read [Creating a team](https://docs.launchdarkly.com/home/teams/creating).\n\n### Expanding the teams response\nLaunchDarkly supports four fields for expanding the \"Create team\" response. By default, these fields are **not** included in the response.\n\nTo expand the response, append the `expand` query parameter and add a comma-separated list with any of the following fields:\n\n* `members` includes the total count of members that belong to the team.\n* `roles` includes a paginated list of the custom roles that you have assigned to the team.\n* `projects` includes a paginated list of the projects that the team has any write access to.\n* `maintainers` includes a paginated list of the maintainers that you have assigned to the team.\n\nFor example, `expand=members,roles` includes the `members` and `roles` fields in the response.\n",
Use: "create", // TODO: translate post -> create
+
Params: []Param{
{
Name: "expand",
diff --git a/cmd/resources/resources.go b/cmd/resources/resources.go
index 330ca49d..65c674a5 100644
--- a/cmd/resources/resources.go
+++ b/cmd/resources/resources.go
@@ -39,25 +39,6 @@ func NewResourceCmd(parentCmd *cobra.Command, analyticsTracker analytics.Tracker
return cmd
}
-type OperationData struct {
- Short string
- Long string
- Use string
- Params []Param
- HTTPMethod string
- RequiresBody bool
- Path string
- SupportsSemanticPatch bool // TBD on how to actually determine from openapi spec
-}
-
-type Param struct {
- Name string
- In string
- Description string
- Type string
- Required bool
-}
-
type OperationCmd struct {
OperationData
client resources.Client
diff --git a/cmd/resources/test_data/expected_template_data.json b/cmd/resources/test_data/expected_template_data.json
new file mode 100644
index 00000000..91973e93
--- /dev/null
+++ b/cmd/resources/test_data/expected_template_data.json
@@ -0,0 +1,109 @@
+{
+ "Resources": {
+ "Teams": {
+ "Name": "Teams",
+ "Description": "A team is a group of members in your LaunchDarkly account.",
+ "Operations": {
+ "deleteTeam": {
+ "Short": "Delete team",
+ "Long": "Delete a team by key.",
+ "Use": "delete",
+ "Params": [
+ {
+ "Name": "teamKey",
+ "In": "path",
+ "Description": "The team key",
+ "Type": "string",
+ "Required": true
+ }
+ ],
+ "HTTPMethod": "DELETE",
+ "RequiresBody": false,
+ "Path": "/api/v2/teams/{teamKey}"
+ },
+ "getTeam": {
+ "Short": "Get team",
+ "Long": "Get team",
+ "Use": "get",
+ "Params": [
+ {
+ "Name": "teamKey",
+ "In": "path",
+ "Description": "The team key.",
+ "Type": "string",
+ "Required": true
+ },
+ {
+ "Name": "expand",
+ "In": "query",
+ "Description": "A comma-separated list of properties that can reveal additional information in the response.",
+ "Type": "string",
+ "Required": false
+ }
+ ],
+ "HTTPMethod": "GET",
+ "RequiresBody": false,
+ "Path": "/api/v2/teams/{teamKey}"
+ },
+ "getTeams": {
+ "Short": "List teams",
+ "Long": "Return a list of teams.",
+ "Use": "list",
+ "Params": [
+ {
+ "Name": "limit",
+ "In": "query",
+ "Description": "The number of teams to return in the response. Defaults to 20.",
+ "Type": "integer",
+ "Required": false
+ }
+ ],
+ "HTTPMethod": "GET",
+ "RequiresBody": false,
+ "Path": "/api/v2/teams"
+ },
+ "patchTeam": {
+ "Short": "Update team",
+ "Long": "Perform a partial update to a team.",
+ "Use": "update",
+ "Params": [
+ {
+ "Name": "teamKey",
+ "In": "path",
+ "Description": "The team key",
+ "Type": "string",
+ "Required": true
+ },
+ {
+ "Name": "expand",
+ "In": "query",
+ "Description": "A comma-separated list of properties.",
+ "Type": "string",
+ "Required": false
+ }
+ ],
+ "HTTPMethod": "PATCH",
+ "RequiresBody": true,
+ "Path": "/api/v2/teams/{teamKey}"
+ },
+ "postTeam": {
+ "Short": "Create team",
+ "Long": "Create a team.",
+ "Use": "create",
+ "Params": [
+ {
+ "Name": "expand",
+ "In": "query",
+ "Description": "A comma-separated list of properties.",
+ "Type": "string",
+ "Required": false
+ }
+ ],
+ "HTTPMethod": "POST",
+ "RequiresBody": true,
+ "Path": "/api/v2/teams"
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/cmd/resources/test_data/test-openapi.json b/cmd/resources/test_data/test-openapi.json
new file mode 100644
index 00000000..d263a1b4
--- /dev/null
+++ b/cmd/resources/test_data/test-openapi.json
@@ -0,0 +1,1323 @@
+{
+ "openapi": "3.0.3",
+ "info": {
+ "title": "LaunchDarkly TEST REST API",
+ "description": "Test file"
+ },
+ "tags": [
+ {
+ "name": "Teams",
+ "description": "A team is a group of members in your LaunchDarkly account."
+ }
+ ],
+ "paths": {
+ "/api/v2/teams": {
+ "get": {
+ "responses": {
+ "200": {
+ "description": "Teams collection response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Teams"
+ }
+ }
+ }
+ },
+ "401": {
+ "description": "Invalid access token",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/UnauthorizedErrorRep"
+ }
+ }
+ }
+ },
+ "405": {
+ "description": "Method not allowed",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/MethodNotAllowedErrorRep"
+ }
+ }
+ }
+ },
+ "429": {
+ "description": "Rate limited",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/RateLimitedErrorRep"
+ }
+ }
+ }
+ }
+ },
+ "tags": [
+ "Teams"
+ ],
+ "summary": "List teams",
+ "description": "Return a list of teams.",
+ "parameters": [
+ {
+ "name": "limit",
+ "in": "query",
+ "description": "The number of teams to return in the response. Defaults to 20.",
+ "schema": {
+ "type": "integer",
+ "format": "int64",
+ "description": "The number of teams to return in the response. Defaults to 20."
+ }
+ }
+ ],
+ "operationId": "getTeams"
+ },
+ "post": {
+ "responses": {
+ "201": {
+ "description": "Teams response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Team"
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "Invalid request",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidRequestErrorRep"
+ }
+ }
+ }
+ },
+ "401": {
+ "description": "Invalid access token",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/UnauthorizedErrorRep"
+ }
+ }
+ }
+ },
+ "405": {
+ "description": "Method not allowed",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/MethodNotAllowedErrorRep"
+ }
+ }
+ }
+ },
+ "429": {
+ "description": "Rate limited",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/RateLimitedErrorRep"
+ }
+ }
+ }
+ }
+ },
+ "tags": [
+ "Teams"
+ ],
+ "summary": "Create team",
+ "description": "Create a team.",
+ "parameters": [
+ {
+ "name": "expand",
+ "in": "query",
+ "description": "A comma-separated list of properties.",
+ "schema": {
+ "type": "string",
+ "format": "string",
+ "description": "A comma-separated list of properties."
+ }
+ }
+ ],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/teamPostInput"
+ },
+ "example": {
+ "customRoleKeys": [
+ "example-role1",
+ "example-role2"
+ ],
+ "description": "An example team",
+ "key": "team-key-123abc",
+ "memberIDs": [
+ "12ab3c45de678910fgh12345"
+ ],
+ "name": "Example team"
+ }
+ }
+ },
+ "required": true
+ },
+ "operationId": "postTeam"
+ }
+ },
+ "/api/v2/teams/{teamKey}": {
+ "get": {
+ "responses": {
+ "200": {
+ "description": "Teams response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Team"
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "Invalid request",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidRequestErrorRep"
+ }
+ }
+ }
+ },
+ "401": {
+ "description": "Invalid access token",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/UnauthorizedErrorRep"
+ }
+ }
+ }
+ },
+ "403": {
+ "description": "Forbidden",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ForbiddenErrorRep"
+ }
+ }
+ }
+ },
+ "404": {
+ "description": "Invalid resource identifier",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/NotFoundErrorRep"
+ }
+ }
+ }
+ },
+ "405": {
+ "description": "Method not allowed",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/MethodNotAllowedErrorRep"
+ }
+ }
+ }
+ },
+ "429": {
+ "description": "Rate limited",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/RateLimitedErrorRep"
+ }
+ }
+ }
+ }
+ },
+ "tags": [
+ "Teams"
+ ],
+ "summary": "Get team",
+ "description": "Get team",
+ "parameters": [
+ {
+ "name": "teamKey",
+ "in": "path",
+ "description": "The team key.",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "string",
+ "description": "The team key."
+ }
+ },
+ {
+ "name": "expand",
+ "in": "query",
+ "description": "A comma-separated list of properties that can reveal additional information in the response.",
+ "schema": {
+ "type": "string",
+ "format": "string",
+ "description": "A comma-separated list of properties that can reveal additional information in the response."
+ }
+ }
+ ],
+ "operationId": "getTeam"
+ },
+ "patch": {
+ "responses": {
+ "200": {
+ "description": "Teams response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Team"
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "Invalid request",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidRequestErrorRep"
+ }
+ }
+ }
+ },
+ "401": {
+ "description": "Invalid access token",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/UnauthorizedErrorRep"
+ }
+ }
+ }
+ },
+ "404": {
+ "description": "Invalid resource identifier",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/NotFoundErrorRep"
+ }
+ }
+ }
+ },
+ "405": {
+ "description": "Method not allowed",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/MethodNotAllowedErrorRep"
+ }
+ }
+ }
+ },
+ "409": {
+ "description": "Status conflict",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/StatusConflictErrorRep"
+ }
+ }
+ }
+ },
+ "429": {
+ "description": "Rate limited",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/RateLimitedErrorRep"
+ }
+ }
+ }
+ }
+ },
+ "tags": [
+ "Teams"
+ ],
+ "summary": "Update team",
+ "description": "Perform a partial update to a team.",
+ "parameters": [
+ {
+ "name": "teamKey",
+ "in": "path",
+ "description": "The team key",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "string",
+ "description": "The team key"
+ }
+ },
+ {
+ "name": "expand",
+ "in": "query",
+ "description": "A comma-separated list of properties.",
+ "schema": {
+ "type": "string",
+ "format": "string",
+ "description": "A comma-separated list of properties."
+ }
+ }
+ ],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/teamPatchInput"
+ },
+ "example": {
+ "comment": "Optional comment about the update",
+ "instructions": [
+ {
+ "kind": "updateDescription",
+ "value": "New description for the team"
+ }
+ ]
+ }
+ }
+ },
+ "required": true
+ },
+ "operationId": "patchTeam"
+ },
+ "delete": {
+ "responses": {
+ "204": {
+ "description": "Action succeeded"
+ },
+ "401": {
+ "description": "Invalid access token",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/UnauthorizedErrorRep"
+ }
+ }
+ }
+ },
+ "404": {
+ "description": "Invalid resource identifier",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/NotFoundErrorRep"
+ }
+ }
+ }
+ },
+ "429": {
+ "description": "Rate limited",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/RateLimitedErrorRep"
+ }
+ }
+ }
+ }
+ },
+ "tags": [
+ "Teams"
+ ],
+ "summary": "Delete team",
+ "description": "Delete a team by key.",
+ "parameters": [
+ {
+ "name": "teamKey",
+ "in": "path",
+ "description": "The team key",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "string",
+ "description": "The team key"
+ }
+ }
+ ],
+ "operationId": "deleteTeam"
+ }
+ }
+ },
+ "components": {
+ "schemas": {
+ "Access": {
+ "type": "object",
+ "required": [
+ "denied",
+ "allowed"
+ ],
+ "properties": {
+ "denied": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/AccessDenied"
+ }
+ },
+ "allowed": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/AccessAllowedRep"
+ }
+ }
+ }
+ },
+ "ActionSpecifier": {
+ "type": "string"
+ },
+ "AccessAllowedReason": {
+ "type": "object",
+ "required": [
+ "effect"
+ ],
+ "properties": {
+ "resources": {
+ "type": "array",
+ "description": "Resource specifier strings",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "proj/*:env/*;qa_*:/flag/*"
+ ]
+ },
+ "notResources": {
+ "type": "array",
+ "description": "Targeted resources are the resources NOT in this list. The resources
and notActions
fields must be empty to use this field.",
+ "items": {
+ "type": "string"
+ }
+ },
+ "actions": {
+ "type": "array",
+ "description": "Actions to perform on a resource",
+ "items": {
+ "$ref": "#/components/schemas/ActionSpecifier"
+ },
+ "example": [
+ "*"
+ ]
+ },
+ "notActions": {
+ "type": "array",
+ "description": "Targeted actions are the actions NOT in this list. The actions
and notResources
fields must be empty to use this field.",
+ "items": {
+ "$ref": "#/components/schemas/ActionSpecifier"
+ }
+ },
+ "effect": {
+ "type": "string",
+ "description": "Whether this statement should allow or deny actions on the resources.",
+ "example": "allow",
+ "enum": [
+ "allow",
+ "deny"
+ ]
+ },
+ "role_name": {
+ "type": "string"
+ }
+ }
+ },
+ "AccessAllowedRep": {
+ "type": "object",
+ "required": [
+ "action",
+ "reason"
+ ],
+ "properties": {
+ "action": {
+ "$ref": "#/components/schemas/ActionIdentifier"
+ },
+ "reason": {
+ "$ref": "#/components/schemas/AccessAllowedReason"
+ }
+ }
+ },
+ "AccessDenied": {
+ "type": "object",
+ "required": [
+ "action",
+ "reason"
+ ],
+ "properties": {
+ "action": {
+ "$ref": "#/components/schemas/ActionIdentifier"
+ },
+ "reason": {
+ "$ref": "#/components/schemas/AccessDeniedReason"
+ }
+ }
+ },
+ "AccessDeniedReason": {
+ "type": "object",
+ "required": [
+ "effect"
+ ],
+ "properties": {
+ "resources": {
+ "type": "array",
+ "description": "Resource specifier strings",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "proj/*:env/*;qa_*:/flag/*"
+ ]
+ },
+ "notResources": {
+ "type": "array",
+ "description": "Targeted resources are the resources NOT in this list. The resources
and notActions
fields must be empty to use this field.",
+ "items": {
+ "type": "string"
+ }
+ },
+ "actions": {
+ "type": "array",
+ "description": "Actions to perform on a resource",
+ "items": {
+ "$ref": "#/components/schemas/ActionSpecifier"
+ },
+ "example": [
+ "*"
+ ]
+ },
+ "notActions": {
+ "type": "array",
+ "description": "Targeted actions are the actions NOT in this list. The actions
and notResources
fields must be empty to use this field.",
+ "items": {
+ "$ref": "#/components/schemas/ActionSpecifier"
+ }
+ },
+ "effect": {
+ "type": "string",
+ "description": "Whether this statement should allow or deny actions on the resources.",
+ "example": "allow",
+ "enum": [
+ "allow",
+ "deny"
+ ]
+ },
+ "role_name": {
+ "type": "string"
+ }
+ }
+ },
+ "ActionIdentifier": {
+ "type": "string"
+ },
+ "BulkEditTeamsRep": {
+ "type": "object",
+ "properties": {
+ "memberIDs": {
+ "type": "array",
+ "description": "A list of member IDs of the members who were added to the teams.",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "1234a56b7c89d012345e678f"
+ ]
+ },
+ "teamKeys": {
+ "type": "array",
+ "description": "A list of team keys of the teams that were successfully updated.",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "example-team-1"
+ ]
+ },
+ "errors": {
+ "type": "array",
+ "description": "A list of team keys and errors for the teams whose updates failed.",
+ "items": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ }
+ },
+ "example": [
+ {
+ "example-team-2": "example failure message"
+ }
+ ]
+ }
+ }
+ },
+ "ForbiddenErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "forbidden"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "Forbidden. Access to the requested resource was denied."
+ }
+ }
+ },
+ "Instruction": {
+ "type": "object",
+ "additionalProperties": {}
+ },
+ "Instructions": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/Instruction"
+ }
+ },
+ "InvalidRequestErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "invalid_request"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "Invalid request body"
+ }
+ }
+ },
+ "Link": {
+ "type": "object",
+ "properties": {
+ "href": {
+ "type": "string"
+ },
+ "type": {
+ "type": "string"
+ }
+ }
+ },
+ "NotFoundErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "not_found"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "Invalid resource identifier"
+ }
+ }
+ },
+ "MemberImportItem": {
+ "type": "object",
+ "required": [
+ "status",
+ "value"
+ ],
+ "properties": {
+ "message": {
+ "type": "string",
+ "description": "An error message, including CSV line number, if the status
is error
"
+ },
+ "status": {
+ "type": "string",
+ "description": "Whether this member can be successfully imported (success
) or not (error
). Even if the status is success
, members are only added to a team on a 201
response.",
+ "example": "error"
+ },
+ "value": {
+ "type": "string",
+ "description": "The email address for the member requested to be added to this team. May be blank or an error, such as 'invalid email format', if the email address cannot be found or parsed.",
+ "example": "new-team-member@acme.com"
+ }
+ }
+ },
+ "MemberSummary": {
+ "type": "object",
+ "required": [
+ "_links",
+ "_id",
+ "role",
+ "email"
+ ],
+ "properties": {
+ "_links": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Link"
+ },
+ "description": "The location and content type of related resources",
+ "example": {
+ "self": {
+ "href": "/api/v2/members/569f183514f4432160000007",
+ "type": "application/json"
+ }
+ }
+ },
+ "_id": {
+ "type": "string",
+ "description": "The member's ID",
+ "example": "569f183514f4432160000007"
+ },
+ "firstName": {
+ "type": "string",
+ "description": "The member's first name",
+ "example": "Ariel"
+ },
+ "lastName": {
+ "type": "string",
+ "description": "The member's last name",
+ "example": "Flores"
+ },
+ "role": {
+ "type": "string",
+ "description": "The member's built-in role. If the member has no custom roles, this role will be in effect.",
+ "example": "admin"
+ },
+ "email": {
+ "type": "string",
+ "description": "The member's email address",
+ "example": "ariel@acme.com"
+ }
+ }
+ },
+ "MethodNotAllowedErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "method_not_allowed"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "Method not allowed"
+ }
+ }
+ },
+ "permissionGrantInput": {
+ "type": "object",
+ "properties": {
+ "actionSet": {
+ "type": "string",
+ "description": "A group of related actions to allow. Specify either actionSet
or actions
. Use maintainTeam
to add team maintainers.",
+ "example": "maintainTeam",
+ "enum": [
+ "maintainTeam"
+ ]
+ },
+ "actions": {
+ "type": "array",
+ "description": "A list of actions to allow. Specify either actionSet
or actions
. To learn more, read [Role actions](https://docs.launchdarkly.com/home/members/role-actions).",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "updateTeamMembers"
+ ]
+ },
+ "memberIDs": {
+ "type": "array",
+ "description": "A list of member IDs who receive the permission grant.",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "12ab3c45de678910fgh12345"
+ ]
+ }
+ }
+ },
+ "ProjectSummary": {
+ "type": "object",
+ "required": [
+ "_id",
+ "_links",
+ "key",
+ "name"
+ ],
+ "properties": {
+ "_id": {
+ "type": "string",
+ "description": "The ID of this project",
+ "example": "57be1db38b75bf0772d11383"
+ },
+ "_links": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Link"
+ },
+ "description": "The location and content type of related resources",
+ "example": {
+ "environments": {
+ "href": "/api/v2/projects/example-project/environments",
+ "type": "application/json"
+ },
+ "self": {
+ "href": "/api/v2/projects/example-project",
+ "type": "application/json"
+ }
+ }
+ },
+ "key": {
+ "type": "string",
+ "description": "The project key",
+ "example": "project-key-123abc"
+ },
+ "name": {
+ "type": "string",
+ "description": "The project name",
+ "example": "Example project"
+ }
+ }
+ },
+ "RateLimitedErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "rate_limited"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "You've exceeded the API rate limit. Try again later."
+ }
+ }
+ },
+ "StatusConflictErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "optimistic_locking_error"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "Conflict. Optimistic lock error. Try again later."
+ }
+ }
+ },
+ "Team": {
+ "type": "object",
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "A description of the team",
+ "example": "Description for this team."
+ },
+ "key": {
+ "type": "string",
+ "description": "The team key",
+ "example": "team-key-123abc"
+ },
+ "name": {
+ "type": "string",
+ "description": "A human-friendly name for the team",
+ "example": "Example team"
+ },
+ "_access": {
+ "description": "Details on the allowed and denied actions for this team",
+ "$ref": "#/components/schemas/Access"
+ },
+ "_creationDate": {
+ "description": "Timestamp of when the team was created",
+ "example": "1648671956143",
+ "$ref": "#/components/schemas/UnixMillis"
+ },
+ "_links": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Link"
+ },
+ "description": "The location and content type of related resources",
+ "example": {
+ "parent": {
+ "href": "/api/v2/teams",
+ "type": "application/json"
+ },
+ "roles": {
+ "href": "/api/v2/teams/example-team/roles",
+ "type": "application/json"
+ },
+ "self": {
+ "href": "/api/v2/teams/example-team",
+ "type": "application/json"
+ }
+ }
+ },
+ "_lastModified": {
+ "description": "Timestamp of when the team was most recently updated",
+ "example": "1648672446072",
+ "$ref": "#/components/schemas/UnixMillis"
+ },
+ "_version": {
+ "type": "integer",
+ "description": "The team version",
+ "example": 3
+ },
+ "_idpSynced": {
+ "type": "boolean",
+ "description": "Whether the team has been synced with an external identity provider (IdP). Team sync is available to customers on an Enterprise plan.",
+ "example": true
+ },
+ "roles": {
+ "description": "Paginated list of the custom roles assigned to this team. Only included if specified in the expand
query parameter.",
+ "$ref": "#/components/schemas/TeamCustomRoles"
+ },
+ "members": {
+ "description": "Details on the total count of members that belong to the team. Only included if specified in the expand
query parameter.",
+ "$ref": "#/components/schemas/TeamMembers"
+ },
+ "projects": {
+ "description": "Paginated list of the projects that the team has any write access to. Only included if specified in the expand
query parameter.",
+ "$ref": "#/components/schemas/TeamProjects"
+ },
+ "maintainers": {
+ "description": "Paginated list of the maintainers assigned to this team. Only included if specified in the expand
query parameter.",
+ "$ref": "#/components/schemas/TeamMaintainers"
+ }
+ }
+ },
+ "TeamCustomRole": {
+ "type": "object",
+ "properties": {
+ "key": {
+ "type": "string",
+ "description": "The key of the custom role",
+ "example": "role-key-123abc"
+ },
+ "name": {
+ "type": "string",
+ "description": "The name of the custom role",
+ "example": "Example role"
+ },
+ "projects": {
+ "description": "Details on the projects where team members have write privileges on at least one resource type (e.g. flags)",
+ "$ref": "#/components/schemas/TeamProjects"
+ },
+ "appliedOn": {
+ "description": "Timestamp of when the custom role was assigned to this team",
+ "example": "1648672018410",
+ "$ref": "#/components/schemas/UnixMillis"
+ }
+ }
+ },
+ "TeamCustomRoles": {
+ "type": "object",
+ "properties": {
+ "totalCount": {
+ "type": "integer",
+ "description": "The number of custom roles assigned to this team",
+ "example": 1
+ },
+ "items": {
+ "type": "array",
+ "description": "An array of the custom roles that have been assigned to this team",
+ "items": {
+ "$ref": "#/components/schemas/TeamCustomRole"
+ }
+ },
+ "_links": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Link"
+ },
+ "description": "The location and content type of related resources",
+ "example": {
+ "self": {
+ "href": "/api/v2/teams/example-team/roles?limit=25",
+ "type": "application/json"
+ }
+ }
+ }
+ }
+ },
+ "TeamImportsRep": {
+ "type": "object",
+ "properties": {
+ "items": {
+ "type": "array",
+ "description": "An array of details about the members requested to be added to this team",
+ "items": {
+ "$ref": "#/components/schemas/MemberImportItem"
+ }
+ }
+ }
+ },
+ "TeamMaintainers": {
+ "type": "object",
+ "properties": {
+ "totalCount": {
+ "type": "integer",
+ "description": "The number of maintainers of the team",
+ "example": 1
+ },
+ "items": {
+ "type": "array",
+ "description": "Details on the members that have been assigned as maintainers of the team",
+ "items": {
+ "$ref": "#/components/schemas/MemberSummary"
+ },
+ "example": [
+ {
+ "_id": "569f183514f4432160000007",
+ "_links": {
+ "self": {
+ "href": "/api/v2/members/569f183514f4432160000007",
+ "type": "application/json"
+ }
+ },
+ "email": "ariel@acme.com",
+ "firstName": "Ariel",
+ "lastName": "Flores",
+ "role": "reader"
+ }
+ ]
+ },
+ "_links": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Link"
+ },
+ "description": "The location and content type of related resources",
+ "example": {
+ "self": {
+ "href": "/api/v2/teams/example-team/maintainers?limit=5",
+ "type": "application/json"
+ }
+ }
+ }
+ }
+ },
+ "TeamMembers": {
+ "type": "object",
+ "properties": {
+ "totalCount": {
+ "type": "integer",
+ "description": "The total count of members that belong to the team",
+ "example": 15
+ }
+ }
+ },
+ "TeamProjects": {
+ "type": "object",
+ "properties": {
+ "totalCount": {
+ "type": "integer",
+ "example": 1
+ },
+ "items": {
+ "type": "array",
+ "description": "Details on each project where team members have write privileges on at least one resource type (e.g. flags)",
+ "items": {
+ "$ref": "#/components/schemas/ProjectSummary"
+ },
+ "example": [
+ {
+ "_links": {
+ "environments": {
+ "href": "/api/v2/projects/example-project/environments",
+ "type": "application/json"
+ },
+ "self": {
+ "href": "/api/v2/projects/example-project",
+ "type": "application/json"
+ }
+ },
+ "key": "project-key-123abc",
+ "name": "Example project"
+ }
+ ]
+ }
+ }
+ },
+ "Teams": {
+ "type": "object",
+ "properties": {
+ "items": {
+ "type": "array",
+ "description": "An array of teams",
+ "items": {
+ "$ref": "#/components/schemas/Team"
+ }
+ },
+ "_links": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Link"
+ },
+ "description": "The location and content type of related resources",
+ "example": {
+ "self": {
+ "href": "/api/v2/teams?expand=maintainers%2Cmembers%2Croles%2Cprojects&limit=20",
+ "type": "application/json"
+ }
+ }
+ },
+ "totalCount": {
+ "type": "integer",
+ "description": "The number of teams",
+ "example": 1
+ }
+ }
+ },
+ "teamPatchInput": {
+ "type": "object",
+ "required": [
+ "instructions"
+ ],
+ "properties": {
+ "comment": {
+ "type": "string",
+ "description": "Optional comment describing the update",
+ "example": "Optional comment about the update"
+ },
+ "instructions": {
+ "description": "The instructions to perform when updating. This should be an array with objects that look like {\"kind\": \"update_action\"}
. Some instructions also require additional parameters as part of this object.",
+ "example": "[ { \"kind\": \"updateDescription\", \"value\": \"New description for the team\" } ]",
+ "$ref": "#/components/schemas/Instructions"
+ }
+ }
+ },
+ "teamsPatchInput": {
+ "type": "object",
+ "required": [
+ "instructions"
+ ],
+ "properties": {
+ "comment": {
+ "type": "string",
+ "description": "Optional comment describing the update",
+ "example": "Optional comment about the update"
+ },
+ "instructions": {
+ "description": "The instructions to perform when updating. This should be an array with objects that look like {\"kind\": \"update_action\"}
. Some instructions also require additional parameters as part of this object.",
+ "example": "[ { \"kind\": \"updateDescription\", \"value\": \"New description for the team\" } ]",
+ "$ref": "#/components/schemas/Instructions"
+ }
+ }
+ },
+ "teamPostInput": {
+ "type": "object",
+ "required": [
+ "key",
+ "name"
+ ],
+ "properties": {
+ "customRoleKeys": {
+ "type": "array",
+ "description": "List of custom role keys the team will access",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "example-role1",
+ "example-role2"
+ ]
+ },
+ "description": {
+ "type": "string",
+ "description": "A description of the team",
+ "example": "An example team"
+ },
+ "key": {
+ "type": "string",
+ "description": "The team key",
+ "example": "team-key-123abc"
+ },
+ "memberIDs": {
+ "type": "array",
+ "description": "A list of member IDs who belong to the team",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "12ab3c45de678910fgh12345"
+ ]
+ },
+ "name": {
+ "type": "string",
+ "description": "A human-friendly name for the team",
+ "example": "Example team"
+ },
+ "permissionGrants": {
+ "type": "array",
+ "description": "A list of permission grants. Permission grants allow access to a specific action, without having to create or update a custom role.",
+ "items": {
+ "$ref": "#/components/schemas/permissionGrantInput"
+ }
+ }
+ }
+ },
+ "UnauthorizedErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "unauthorized"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "Invalid access token"
+ }
+ }
+ },
+ "UnixMillis": {
+ "type": "integer",
+ "format": "int64"
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/go.mod b/go.mod
index c916de25..72aabd28 100644
--- a/go.mod
+++ b/go.mod
@@ -7,6 +7,7 @@ require (
github.com/charmbracelet/bubbletea v0.25.0
github.com/charmbracelet/glamour v0.6.0
github.com/charmbracelet/lipgloss v0.10.0
+ github.com/getkin/kin-openapi v0.124.0
github.com/google/uuid v1.6.0
github.com/launchdarkly/api-client-go/v14 v14.0.0
github.com/mitchellh/go-homedir v1.1.0
@@ -28,22 +29,29 @@ require (
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dlclark/regexp2 v1.4.0 // indirect
github.com/fsnotify/fsnotify v1.7.0 // indirect
+ github.com/go-openapi/jsonpointer v0.20.2 // indirect
+ github.com/go-openapi/swag v0.22.8 // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/gorilla/css v1.0.0 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
+ github.com/invopop/yaml v0.2.0 // indirect
+ github.com/josharian/intern v1.0.0 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/magiconair/properties v1.8.7 // indirect
+ github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-isatty v0.0.18 // indirect
github.com/mattn/go-localereader v0.0.1 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/microcosm-cc/bluemonday v1.0.21 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
+ github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
github.com/muesli/ansi v0.0.0-20211018074035-2e021307bc4b // indirect
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/muesli/termenv v0.15.2 // indirect
github.com/olekukonko/tablewriter v0.0.5 // indirect
github.com/pelletier/go-toml/v2 v2.1.0 // indirect
+ github.com/perimeterx/marshmallow v1.1.5 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/sagikazarmark/locafero v0.4.0 // indirect
diff --git a/go.sum b/go.sum
index 6097aa65..58f11249 100644
--- a/go.sum
+++ b/go.sum
@@ -72,9 +72,16 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
+github.com/getkin/kin-openapi v0.124.0 h1:VSFNMB9C9rTKBnQ/fpyDU8ytMTr4dWI9QovSKj9kz/M=
+github.com/getkin/kin-openapi v0.124.0/go.mod h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
+github.com/go-openapi/jsonpointer v0.20.2 h1:mQc3nmndL8ZBzStEo3JYF8wzmeWffDH4VbXz58sAx6Q=
+github.com/go-openapi/jsonpointer v0.20.2/go.mod h1:bHen+N0u1KEO3YlmqOjTT9Adn1RfD91Ar825/PuiRVs=
+github.com/go-openapi/swag v0.22.8 h1:/9RjDSQ0vbFR+NyjGMkFTsA1IA0fmhKSThmfGZjicbw=
+github.com/go-openapi/swag v0.22.8/go.mod h1:6QT22icPLEqAM/z/TChgb4WAveCHF92+2gF0CNjHpPI=
+github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
@@ -136,6 +143,10 @@ github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/invopop/yaml v0.2.0 h1:7zky/qH+O0DwAyoobXUqvVBwgBFRxKoQ/3FjcVpjTMY=
+github.com/invopop/yaml v0.2.0/go.mod h1:2XuRLgs/ouIrW3XNzuNj7J3Nvu/Dig5MXvbCEdiBN3Q=
+github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
+github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
@@ -151,6 +162,8 @@ github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
+github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
+github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.18 h1:DOKFKCQ7FNG2L1rbrmstDN4QVRdS89Nkh85u68Uwp98=
github.com/mattn/go-isatty v0.0.18/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
@@ -167,6 +180,8 @@ github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw=
+github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8=
github.com/muesli/ansi v0.0.0-20211018074035-2e021307bc4b h1:1XF24mVaiu7u+CFywTdcDo2ie1pzzhwjt6RHqzpMU34=
github.com/muesli/ansi v0.0.0-20211018074035-2e021307bc4b/go.mod h1:fQuZ0gauxyBcmsdE3ZT4NasjaRdxmbCS0jRHsrWu3Ho=
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
@@ -180,6 +195,8 @@ github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
+github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s=
+github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
@@ -191,7 +208,7 @@ github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJ
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
-github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
+github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ=
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
@@ -226,6 +243,7 @@ github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsT
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
+github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@@ -498,12 +516,13 @@ google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGm
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
diff --git a/ld-teams-openapi.json b/ld-teams-openapi.json
new file mode 100644
index 00000000..2ab56792
--- /dev/null
+++ b/ld-teams-openapi.json
@@ -0,0 +1,1812 @@
+{
+ "openapi": "3.0.3",
+ "info": {
+ "title": "LaunchDarkly REST API",
+ "description": "# Overview\n\n## Authentication\n\nLaunchDarkly's REST API uses the HTTPS protocol with a minimum TLS version of 1.2.\n\nAll REST API resources are authenticated with either [personal or service access tokens](https://docs.launchdarkly.com/home/account-security/api-access-tokens), or session cookies. Other authentication mechanisms are not supported. You can manage personal access tokens on your [**Account settings**](https://app.launchdarkly.com/settings/tokens) page.\n\nLaunchDarkly also has SDK keys, mobile keys, and client-side IDs that are used by our server-side SDKs, mobile SDKs, and JavaScript-based SDKs, respectively. **These keys cannot be used to access our REST API**. These keys are environment-specific, and can only perform read-only operations such as fetching feature flag settings.\n\n| Auth mechanism | Allowed resources | Use cases |\n| ----------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | -------------------------------------------------- |\n| [Personal or service access tokens](https://docs.launchdarkly.com/home/account-security/api-access-tokens) | Can be customized on a per-token basis | Building scripts, custom integrations, data export. |\n| SDK keys | Can only access read-only resources specific to server-side SDKs. Restricted to a single environment. | Server-side SDKs |\n| Mobile keys | Can only access read-only resources specific to mobile SDKs, and only for flags marked available to mobile keys. Restricted to a single environment. | Mobile SDKs |\n| Client-side ID | Can only access read-only resources specific to JavaScript-based client-side SDKs, and only for flags marked available to client-side. Restricted to a single environment. | Client-side JavaScript |\n\n> #### Keep your access tokens and SDK keys private\n>\n> Access tokens should _never_ be exposed in untrusted contexts. Never put an access token in client-side JavaScript, or embed it in a mobile application. LaunchDarkly has special mobile keys that you can embed in mobile apps. If you accidentally expose an access token or SDK key, you can reset it from your [**Account settings**](https://app.launchdarkly.com/settings/tokens) page.\n>\n> The client-side ID is safe to embed in untrusted contexts. It's designed for use in client-side JavaScript.\n\n### Authentication using request header\n\nThe preferred way to authenticate with the API is by adding an `Authorization` header containing your access token to your requests. The value of the `Authorization` header must be your access token.\n\nManage personal access tokens from the [**Account settings**](https://app.launchdarkly.com/settings/tokens) page.\n\n### Authentication using session cookie\n\nFor testing purposes, you can make API calls directly from your web browser. If you are logged in to the LaunchDarkly application, the API will use your existing session to authenticate calls.\n\nIf you have a [role](https://docs.launchdarkly.com/home/team/built-in-roles) other than Admin, or have a [custom role](https://docs.launchdarkly.com/home/team/custom-roles) defined, you may not have permission to perform some API calls. You will receive a `401` response code in that case.\n\n> ### Modifying the Origin header causes an error\n>\n> LaunchDarkly validates that the Origin header for any API request authenticated by a session cookie matches the expected Origin header. 
The expected Origin header is `https://app.launchdarkly.com`.\n>\n> If the Origin header does not match what's expected, LaunchDarkly returns an error. This error can prevent the LaunchDarkly app from working correctly.\n>\n> Any browser extension that intentionally changes the Origin header can cause this problem. For example, the `Allow-Control-Allow-Origin: *` Chrome extension changes the Origin header to `http://evil.com` and causes the app to fail.\n>\n> To prevent this error, do not modify your Origin header.\n>\n> LaunchDarkly does not require origin matching when authenticating with an access token, so this issue does not affect normal API usage.\n\n## Representations\n\nAll resources expect and return JSON response bodies. Error responses also send a JSON body. To learn more about the error format of the API, read [Errors](/#section/Overview/Errors).\n\nIn practice this means that you always get a response with a `Content-Type` header set to `application/json`.\n\nIn addition, request bodies for `PATCH`, `POST`, and `PUT` requests must be encoded as JSON with a `Content-Type` header set to `application/json`.\n\n### Summary and detailed representations\n\nWhen you fetch a list of resources, the response includes only the most important attributes of each resource. This is a _summary representation_ of the resource. When you fetch an individual resource, such as a single feature flag, you receive a _detailed representation_ of the resource.\n\nThe best way to find a detailed representation is to follow links. Every summary representation includes a link to its detailed representation.\n\n### Expanding responses\n\nSometimes the detailed representation of a resource does not include all of the attributes of the resource by default. If this is the case, the request method will clearly document this and describe which attributes you can include in an expanded response.\n\nTo include the additional attributes, append the `expand` request parameter to your request and add a comma-separated list of the attributes to include. For example, when you append `?expand=members,roles` to the [Get team](/tag/Teams#operation/getTeam) endpoint, the expanded response includes both of these attributes.\n\n### Links and addressability\n\nThe best way to navigate the API is by following links. These are attributes in representations that link to other resources. The API always uses the same format for links:\n\n- Links to other resources within the API are encapsulated in a `_links` object\n- If the resource has a corresponding link to HTML content on the site, it is stored in a special `_site` link\n\nEach link has two attributes:\n\n- An `href`, which contains the URL\n- A `type`, which describes the content type\n\nFor example, a feature resource might return the following:\n\n```json\n{\n \"_links\": {\n \"parent\": {\n \"href\": \"/api/features\",\n \"type\": \"application/json\"\n },\n \"self\": {\n \"href\": \"/api/features/sort.order\",\n \"type\": \"application/json\"\n }\n },\n \"_site\": {\n \"href\": \"/features/sort.order\",\n \"type\": \"text/html\"\n }\n}\n```\n\nFrom this, you can navigate to the parent collection of features by following the `parent` link, or navigate to the site page for the feature by following the `_site` link.\n\nCollections are always represented as a JSON object with an `items` attribute containing an array of representations. 
Like all other representations, collections have `_links` defined at the top level.\n\nPaginated collections include `first`, `last`, `next`, and `prev` links containing a URL with the respective set of elements in the collection.\n\n## Updates\n\nResources that accept partial updates use the `PATCH` verb. Most resources support the [JSON patch](/reference#updates-using-json-patch) format. Some resources also support the [JSON merge patch](/reference#updates-using-json-merge-patch) format, and some resources support the [semantic patch](/reference#updates-using-semantic-patch) format, which is a way to specify the modifications to perform as a set of executable instructions. Each resource supports optional [comments](/reference#updates-with-comments) that you can submit with updates. Comments appear in outgoing webhooks, the audit log, and other integrations.\n\nWhen a resource supports both JSON patch and semantic patch, we document both in the request method. However, the specific request body fields and descriptions included in our documentation only match one type of patch or the other.\n\n### Updates using JSON patch\n\n[JSON patch](https://datatracker.ietf.org/doc/html/rfc6902) is a way to specify the modifications to perform on a resource. JSON patch uses paths and a limited set of operations to describe how to transform the current state of the resource into a new state. JSON patch documents are always arrays, where each element contains an operation, a path to the field to update, and the new value.\n\nFor example, in this feature flag representation:\n\n```json\n{\n \"name\": \"New recommendations engine\",\n \"key\": \"engine.enable\",\n \"description\": \"This is the description\",\n ...\n}\n```\nYou can change the feature flag's description with the following patch document:\n\n```json\n[{ \"op\": \"replace\", \"path\": \"/description\", \"value\": \"This is the new description\" }]\n```\n\nYou can specify multiple modifications to perform in a single request. You can also test that certain preconditions are met before applying the patch:\n\n```json\n[\n { \"op\": \"test\", \"path\": \"/version\", \"value\": 10 },\n { \"op\": \"replace\", \"path\": \"/description\", \"value\": \"The new description\" }\n]\n```\n\nThe above patch request tests whether the feature flag's `version` is `10`, and if so, changes the feature flag's description.\n\nAttributes that are not editable, such as a resource's `_links`, have names that start with an underscore.\n\n### Updates using JSON merge patch\n\n[JSON merge patch](https://datatracker.ietf.org/doc/html/rfc7386) is another format for specifying the modifications to perform on a resource. JSON merge patch is less expressive than JSON patch. However, in many cases it is simpler to construct a merge patch document. For example, you can change a feature flag's description with the following merge patch document:\n\n```json\n{\n \"description\": \"New flag description\"\n}\n```\n\n### Updates using semantic patch\n\nSome resources support the semantic patch format. A semantic patch is a way to specify the modifications to perform on a resource as a set of executable instructions.\n\nSemantic patch allows you to be explicit about intent using precise, custom instructions. In many cases, you can define semantic patch instructions independently of the current state of the resource. 
This can be useful when defining a change that may be applied at a future date.\n\nTo make a semantic patch request, you must append `domain-model=launchdarkly.semanticpatch` to your `Content-Type` header.\n\nHere's how:\n\n```\nContent-Type: application/json; domain-model=launchdarkly.semanticpatch\n```\n\nIf you call a semantic patch resource without this header, you will receive a `400` response because your semantic patch will be interpreted as a JSON patch.\n\nThe body of a semantic patch request takes the following properties:\n\n* `comment` (string): (Optional) A description of the update.\n* `environmentKey` (string): (Required for some resources only) The environment key.\n* `instructions` (array): (Required) A list of actions the update should perform. Each action in the list must be an object with a `kind` property that indicates the instruction. If the instruction requires parameters, you must include those parameters as additional fields in the object. The documentation for each resource that supports semantic patch includes the available instructions and any additional parameters.\n\nFor example:\n\n```json\n{\n \"comment\": \"optional comment\",\n \"instructions\": [ {\"kind\": \"turnFlagOn\"} ]\n}\n```\n\nIf any instruction in the patch encounters an error, the endpoint returns an error and will not change the resource. In general, each instruction silently does nothing if the resource is already in the state you request.\n\n### Updates with comments\n\nYou can submit optional comments with `PATCH` changes.\n\nTo submit a comment along with a JSON patch document, use the following format:\n\n```json\n{\n \"comment\": \"This is a comment string\",\n \"patch\": [{ \"op\": \"replace\", \"path\": \"/description\", \"value\": \"The new description\" }]\n}\n```\n\nTo submit a comment along with a JSON merge patch document, use the following format:\n\n```json\n{\n \"comment\": \"This is a comment string\",\n \"merge\": { \"description\": \"New flag description\" }\n}\n```\n\nTo submit a comment along with a semantic patch, use the following format:\n\n```json\n{\n \"comment\": \"This is a comment string\",\n \"instructions\": [ {\"kind\": \"turnFlagOn\"} ]\n}\n```\n\n## Errors\n\nThe API always returns errors in a common format. Here's an example:\n\n```json\n{\n \"code\": \"invalid_request\",\n \"message\": \"A feature with that key already exists\",\n \"id\": \"30ce6058-87da-11e4-b116-123b93f75cba\"\n}\n```\n\nThe `code` indicates the general class of error. The `message` is a human-readable explanation of what went wrong. The `id` is a unique identifier. Use it when you're working with LaunchDarkly Support to debug a problem with a specific API call.\n\n### HTTP status error response codes\n\n| Code | Definition | Description | Possible Solution |\n| ---- | ----------------- | ------------------------------------------------------------------------------------------- | ---------------------------------------------------------------- |\n| 400 | Invalid request | The request cannot be understood. | Ensure JSON syntax in request body is correct. |\n| 401 | Invalid access token | Requestor is unauthorized or does not have permission for this API call. | Ensure your API access token is valid and has the appropriate permissions. |\n| 403 | Forbidden | Requestor does not have access to this resource. | Ensure that the account member or access token has proper permissions set. |\n| 404 | Invalid resource identifier | The requested resource is not valid. 
| Ensure that the resource is correctly identified by ID or key. |\n| 405 | Method not allowed | The request method is not allowed on this resource. | Ensure that the HTTP verb is correct. |\n| 409 | Conflict | The API request can not be completed because it conflicts with a concurrent API request. | Retry your request. |\n| 422 | Unprocessable entity | The API request can not be completed because the update description can not be understood. | Ensure that the request body is correct for the type of patch you are using, either JSON patch or semantic patch.\n| 429 | Too many requests | Read [Rate limiting](/#section/Overview/Rate-limiting). | Wait and try again later. |\n\n## CORS\n\nThe LaunchDarkly API supports Cross Origin Resource Sharing (CORS) for AJAX requests from any origin. If an `Origin` header is given in a request, it will be echoed as an explicitly allowed origin. Otherwise the request returns a wildcard, `Access-Control-Allow-Origin: *`. For more information on CORS, read the [CORS W3C Recommendation](http://www.w3.org/TR/cors). Example CORS headers might look like:\n\n```http\nAccess-Control-Allow-Headers: Accept, Content-Type, Content-Length, Accept-Encoding, Authorization\nAccess-Control-Allow-Methods: OPTIONS, GET, DELETE, PATCH\nAccess-Control-Allow-Origin: *\nAccess-Control-Max-Age: 300\n```\n\nYou can make authenticated CORS calls just as you would make same-origin calls, using either [token or session-based authentication](/#section/Overview/Authentication). If you are using session authentication, you should set the `withCredentials` property for your `xhr` request to `true`. You should never expose your access tokens to untrusted entities.\n\n## Rate limiting\n\nWe use several rate limiting strategies to ensure the availability of our APIs. Rate-limited calls to our APIs return a `429` status code. Calls to our APIs include headers indicating the current rate limit status. The specific headers returned depend on the API route being called. The limits differ based on the route, authentication mechanism, and other factors. Routes that are not rate limited may not contain any of the headers described below.\n\n> ### Rate limiting and SDKs\n>\n> LaunchDarkly SDKs are never rate limited and do not use the API endpoints defined here. LaunchDarkly uses a different set of approaches, including streaming/server-sent events and a global CDN, to ensure availability to the routes used by LaunchDarkly SDKs.\n\n### Global rate limits\n\nAuthenticated requests are subject to a global limit. This is the maximum number of calls that your account can make to the API per ten seconds. All service and personal access tokens on the account share this limit, so exceeding the limit with one access token will impact other tokens. Calls that are subject to global rate limits may return the headers below:\n\n| Header name | Description |\n| ------------------------------ | -------------------------------------------------------------------------------- |\n| `X-Ratelimit-Global-Remaining` | The maximum number of requests the account is permitted to make per ten seconds. |\n| `X-Ratelimit-Reset` | The time at which the current rate limit window resets in epoch milliseconds. |\n\nWe do not publicly document the specific number of calls that can be made globally. 
This limit may change, and we encourage clients to program against the specification, relying on the two headers defined above, rather than hardcoding to the current limit.\n\n### Route-level rate limits\n\nSome authenticated routes have custom rate limits. These also reset every ten seconds. Any service or personal access tokens hitting the same route share this limit, so exceeding the limit with one access token may impact other tokens. Calls that are subject to route-level rate limits return the headers below:\n\n| Header name | Description |\n| ----------------------------- | ----------------------------------------------------------------------------------------------------- |\n| `X-Ratelimit-Route-Remaining` | The maximum number of requests to the current route the account is permitted to make per ten seconds. |\n| `X-Ratelimit-Reset` | The time at which the current rate limit window resets in epoch milliseconds. |\n\nA _route_ represents a specific URL pattern and verb. For example, the [Delete environment](/tag/Environments#operation/deleteEnvironment) endpoint is considered a single route, and each call to delete an environment counts against your route-level rate limit for that route.\n\nWe do not publicly document the specific number of calls that an account can make to each endpoint per ten seconds. These limits may change, and we encourage clients to program against the specification, relying on the two headers defined above, rather than hardcoding to the current limits.\n\n### IP-based rate limiting\n\nWe also employ IP-based rate limiting on some API routes. If you hit an IP-based rate limit, your API response will include a `Retry-After` header indicating how long to wait before re-trying the call. Clients must wait at least `Retry-After` seconds before making additional calls to our API, and should employ jitter and backoff strategies to avoid triggering rate limits again.\n\n## OpenAPI (Swagger) and client libraries\n\nWe have a [complete OpenAPI (Swagger) specification](https://app.launchdarkly.com/api/v2/openapi.json) for our API.\n\nWe auto-generate multiple client libraries based on our OpenAPI specification. To learn more, visit the [collection of client libraries on GitHub](https://github.com/search?q=topic%3Alaunchdarkly-api+org%3Alaunchdarkly&type=Repositories). You can also use this specification to generate client libraries to interact with our REST API in your language of choice.\n\nOur OpenAPI specification is supported by several API-based tools such as Postman and Insomnia. In many cases, you can directly import our specification to explore our APIs.\n\n## Method overriding\n\nSome firewalls and HTTP clients restrict the use of verbs other than `GET` and `POST`. In those environments, our API endpoints that use `DELETE`, `PATCH`, and `PUT` verbs are inaccessible.\n\nTo avoid this issue, our API supports the `X-HTTP-Method-Override` header, allowing clients to \"tunnel\" `DELETE`, `PATCH`, and `PUT` requests using a `POST` request.\n\nFor example, to call a `PATCH` endpoint using a `POST` request, you can include `X-HTTP-Method-Override:PATCH` as a header.\n\n## Beta resources\n\nWe sometimes release new API resources in **beta** status before we release them with general availability.\n\nResources that are in beta are still undergoing testing and development. They may change without notice, including becoming backwards incompatible.\n\nWe try to promote resources into general availability as quickly as possible. 
This happens after sufficient testing and when we're satisfied that we no longer need to make backwards-incompatible changes.\n\nWe mark beta resources with a \"Beta\" callout in our documentation, pictured below:\n\n> ### This feature is in beta\n>\n> To use this feature, pass in a header including the `LD-API-Version` key with value set to `beta`. Use this header with each call. To learn more, read [Beta resources](/#section/Overview/Beta-resources).\n>\n> Resources that are in beta are still undergoing testing and development. They may change without notice, including becoming backwards incompatible.\n\n### Using beta resources\n\nTo use a beta resource, you must include a header in the request. If you call a beta resource without this header, you receive a `403` response.\n\nUse this header:\n\n```\nLD-API-Version: beta\n```\n\n## Federal environments\n\nThe version of LaunchDarkly that is available on domains controlled by the United States government is different from the version of LaunchDarkly available to the general public. If you are an employee or contractor for a United States federal agency and use LaunchDarkly in your work, you likely use the federal instance of LaunchDarkly.\n\nIf you are working in the federal instance of LaunchDarkly, the base URI for each request is `https://app.launchdarkly.us`. In the \"Try it\" sandbox for each request, click the request path to view the complete resource path for the federal environment.\n\nTo learn more, read [LaunchDarkly in federal environments](https://docs.launchdarkly.com/home/advanced/federal).\n\n## Versioning\n\nWe try hard to keep our REST API backwards compatible, but we occasionally have to make backwards-incompatible changes in the process of shipping new features. These breaking changes can cause unexpected behavior if you don't prepare for them accordingly.\n\nUpdates to our REST API include support for the latest features in LaunchDarkly. We also release a new version of our REST API every time we make a breaking change. We provide simultaneous support for multiple API versions so you can migrate from your current API version to a new version at your own pace.\n\n### Setting the API version per request\n\nYou can set the API version on a specific request by sending an `LD-API-Version` header, as shown in the example below:\n\n```\nLD-API-Version: 20220603\n```\n\nThe header value is the version number of the API version you would like to request. The number for each version corresponds to the date the version was released in `yyyymmdd` format. In the example above the version `20220603` corresponds to June 03, 2022.\n\n### Setting the API version per access token\n\nWhen you create an access token, you must specify a specific version of the API to use. This ensures that integrations using this token cannot be broken by version changes.\n\nTokens created before versioning was released have their version set to `20160426`, which is the version of the API that existed before the current versioning scheme, so that they continue working the same way they did before versioning.\n\nIf you would like to upgrade your integration to use a new API version, you can explicitly set the header described above.\n\n> ### Best practice: Set the header for every client or integration\n>\n> We recommend that you set the API version header explicitly in any client or integration you build.\n>\n> Only rely on the access token API version during manual testing.\n\n### API version changelog\n\n|
resources and notActions fields must be empty to use this field.",
+ "items": {
+ "type": "string"
+ }
+ },
+ "actions": {
+ "type": "array",
+ "description": "Actions to perform on a resource",
+ "items": {
+ "$ref": "#/components/schemas/ActionSpecifier"
+ },
+ "example": [
+ "*"
+ ]
+ },
+ "notActions": {
+ "type": "array",
+ "description": "Targeted actions are the actions NOT in this list. The actions
and notResources
fields must be empty to use this field.",
+ "items": {
+ "$ref": "#/components/schemas/ActionSpecifier"
+ }
+ },
+ "effect": {
+ "type": "string",
+ "description": "Whether this statement should allow or deny actions on the resources.",
+ "example": "allow",
+ "enum": [
+ "allow",
+ "deny"
+ ]
+ },
+ "role_name": {
+ "type": "string"
+ }
+ }
+ },
+ "AccessAllowedRep": {
+ "type": "object",
+ "required": [
+ "action",
+ "reason"
+ ],
+ "properties": {
+ "action": {
+ "$ref": "#/components/schemas/ActionIdentifier"
+ },
+ "reason": {
+ "$ref": "#/components/schemas/AccessAllowedReason"
+ }
+ }
+ },
+ "AccessDenied": {
+ "type": "object",
+ "required": [
+ "action",
+ "reason"
+ ],
+ "properties": {
+ "action": {
+ "$ref": "#/components/schemas/ActionIdentifier"
+ },
+ "reason": {
+ "$ref": "#/components/schemas/AccessDeniedReason"
+ }
+ }
+ },
+ "AccessDeniedReason": {
+ "type": "object",
+ "required": [
+ "effect"
+ ],
+ "properties": {
+ "resources": {
+ "type": "array",
+ "description": "Resource specifier strings",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "proj/*:env/*;qa_*:/flag/*"
+ ]
+ },
+ "notResources": {
+ "type": "array",
+ "description": "Targeted resources are the resources NOT in this list. The resources
and notActions
fields must be empty to use this field.",
+ "items": {
+ "type": "string"
+ }
+ },
+ "actions": {
+ "type": "array",
+ "description": "Actions to perform on a resource",
+ "items": {
+ "$ref": "#/components/schemas/ActionSpecifier"
+ },
+ "example": [
+ "*"
+ ]
+ },
+ "notActions": {
+ "type": "array",
+ "description": "Targeted actions are the actions NOT in this list. The actions
and notResources
fields must be empty to use this field.",
+ "items": {
+ "$ref": "#/components/schemas/ActionSpecifier"
+ }
+ },
+ "effect": {
+ "type": "string",
+ "description": "Whether this statement should allow or deny actions on the resources.",
+ "example": "allow",
+ "enum": [
+ "allow",
+ "deny"
+ ]
+ },
+ "role_name": {
+ "type": "string"
+ }
+ }
+ },
+ "ActionIdentifier": {
+ "type": "string"
+ },
+ "BulkEditTeamsRep": {
+ "type": "object",
+ "properties": {
+ "memberIDs": {
+ "type": "array",
+ "description": "A list of member IDs of the members who were added to the teams.",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "1234a56b7c89d012345e678f"
+ ]
+ },
+ "teamKeys": {
+ "type": "array",
+ "description": "A list of team keys of the teams that were successfully updated.",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "example-team-1"
+ ]
+ },
+ "errors": {
+ "type": "array",
+ "description": "A list of team keys and errors for the teams whose updates failed.",
+ "items": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ }
+ },
+ "example": [
+ {
+ "example-team-2": "example failure message"
+ }
+ ]
+ }
+ }
+ },
+ "ForbiddenErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "forbidden"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "Forbidden. Access to the requested resource was denied."
+ }
+ }
+ },
+ "Instruction": {
+ "type": "object",
+ "additionalProperties": {}
+ },
+ "Instructions": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/Instruction"
+ }
+ },
+ "InvalidRequestErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "invalid_request"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "Invalid request body"
+ }
+ }
+ },
+ "Link": {
+ "type": "object",
+ "properties": {
+ "href": {
+ "type": "string"
+ },
+ "type": {
+ "type": "string"
+ }
+ }
+ },
+ "NotFoundErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "not_found"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "Invalid resource identifier"
+ }
+ }
+ },
+ "MemberImportItem": {
+ "type": "object",
+ "required": [
+ "status",
+ "value"
+ ],
+ "properties": {
+ "message": {
+ "type": "string",
+ "description": "An error message, including CSV line number, if the status
is error
"
+ },
+ "status": {
+ "type": "string",
+ "description": "Whether this member can be successfully imported (success
) or not (error
). Even if the status is success
, members are only added to a team on a 201
response.",
+ "example": "error"
+ },
+ "value": {
+ "type": "string",
+ "description": "The email address for the member requested to be added to this team. May be blank or an error, such as 'invalid email format', if the email address cannot be found or parsed.",
+ "example": "new-team-member@acme.com"
+ }
+ }
+ },
+ "MemberSummary": {
+ "type": "object",
+ "required": [
+ "_links",
+ "_id",
+ "role",
+ "email"
+ ],
+ "properties": {
+ "_links": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Link"
+ },
+ "description": "The location and content type of related resources",
+ "example": {
+ "self": {
+ "href": "/api/v2/members/569f183514f4432160000007",
+ "type": "application/json"
+ }
+ }
+ },
+ "_id": {
+ "type": "string",
+ "description": "The member's ID",
+ "example": "569f183514f4432160000007"
+ },
+ "firstName": {
+ "type": "string",
+ "description": "The member's first name",
+ "example": "Ariel"
+ },
+ "lastName": {
+ "type": "string",
+ "description": "The member's last name",
+ "example": "Flores"
+ },
+ "role": {
+ "type": "string",
+ "description": "The member's built-in role. If the member has no custom roles, this role will be in effect.",
+ "example": "admin"
+ },
+ "email": {
+ "type": "string",
+ "description": "The member's email address",
+ "example": "ariel@acme.com"
+ }
+ }
+ },
+ "MethodNotAllowedErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "method_not_allowed"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "Method not allowed"
+ }
+ }
+ },
+ "permissionGrantInput": {
+ "type": "object",
+ "properties": {
+ "actionSet": {
+ "type": "string",
+ "description": "A group of related actions to allow. Specify either actionSet
or actions
. Use maintainTeam
to add team maintainers.",
+ "example": "maintainTeam",
+ "enum": [
+ "maintainTeam"
+ ]
+ },
+ "actions": {
+ "type": "array",
+ "description": "A list of actions to allow. Specify either actionSet
or actions
. To learn more, read [Role actions](https://docs.launchdarkly.com/home/members/role-actions).",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "updateTeamMembers"
+ ]
+ },
+ "memberIDs": {
+ "type": "array",
+ "description": "A list of member IDs who receive the permission grant.",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "12ab3c45de678910fgh12345"
+ ]
+ }
+ }
+ },
+ "ProjectSummary": {
+ "type": "object",
+ "required": [
+ "_id",
+ "_links",
+ "key",
+ "name"
+ ],
+ "properties": {
+ "_id": {
+ "type": "string",
+ "description": "The ID of this project",
+ "example": "57be1db38b75bf0772d11383"
+ },
+ "_links": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Link"
+ },
+ "description": "The location and content type of related resources",
+ "example": {
+ "environments": {
+ "href": "/api/v2/projects/example-project/environments",
+ "type": "application/json"
+ },
+ "self": {
+ "href": "/api/v2/projects/example-project",
+ "type": "application/json"
+ }
+ }
+ },
+ "key": {
+ "type": "string",
+ "description": "The project key",
+ "example": "project-key-123abc"
+ },
+ "name": {
+ "type": "string",
+ "description": "The project name",
+ "example": "Example project"
+ }
+ }
+ },
+ "RateLimitedErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "rate_limited"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "You've exceeded the API rate limit. Try again later."
+ }
+ }
+ },
+ "StatusConflictErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "optimistic_locking_error"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "Conflict. Optimistic lock error. Try again later."
+ }
+ }
+ },
+ "Team": {
+ "type": "object",
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "A description of the team",
+ "example": "Description for this team."
+ },
+ "key": {
+ "type": "string",
+ "description": "The team key",
+ "example": "team-key-123abc"
+ },
+ "name": {
+ "type": "string",
+ "description": "A human-friendly name for the team",
+ "example": "Example team"
+ },
+ "_access": {
+ "description": "Details on the allowed and denied actions for this team",
+ "$ref": "#/components/schemas/Access"
+ },
+ "_creationDate": {
+ "description": "Timestamp of when the team was created",
+ "example": "1648671956143",
+ "$ref": "#/components/schemas/UnixMillis"
+ },
+ "_links": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Link"
+ },
+ "description": "The location and content type of related resources",
+ "example": {
+ "parent": {
+ "href": "/api/v2/teams",
+ "type": "application/json"
+ },
+ "roles": {
+ "href": "/api/v2/teams/example-team/roles",
+ "type": "application/json"
+ },
+ "self": {
+ "href": "/api/v2/teams/example-team",
+ "type": "application/json"
+ }
+ }
+ },
+ "_lastModified": {
+ "description": "Timestamp of when the team was most recently updated",
+ "example": "1648672446072",
+ "$ref": "#/components/schemas/UnixMillis"
+ },
+ "_version": {
+ "type": "integer",
+ "description": "The team version",
+ "example": 3
+ },
+ "_idpSynced": {
+ "type": "boolean",
+ "description": "Whether the team has been synced with an external identity provider (IdP). Team sync is available to customers on an Enterprise plan.",
+ "example": true
+ },
+ "roles": {
+ "description": "Paginated list of the custom roles assigned to this team. Only included if specified in the expand
query parameter.",
+ "$ref": "#/components/schemas/TeamCustomRoles"
+ },
+ "members": {
+ "description": "Details on the total count of members that belong to the team. Only included if specified in the expand
query parameter.",
+ "$ref": "#/components/schemas/TeamMembers"
+ },
+ "projects": {
+ "description": "Paginated list of the projects that the team has any write access to. Only included if specified in the expand
query parameter.",
+ "$ref": "#/components/schemas/TeamProjects"
+ },
+ "maintainers": {
+ "description": "Paginated list of the maintainers assigned to this team. Only included if specified in the expand
query parameter.",
+ "$ref": "#/components/schemas/TeamMaintainers"
+ }
+ }
+ },
+ "TeamCustomRole": {
+ "type": "object",
+ "properties": {
+ "key": {
+ "type": "string",
+ "description": "The key of the custom role",
+ "example": "role-key-123abc"
+ },
+ "name": {
+ "type": "string",
+ "description": "The name of the custom role",
+ "example": "Example role"
+ },
+ "projects": {
+ "description": "Details on the projects where team members have write privileges on at least one resource type (e.g. flags)",
+ "$ref": "#/components/schemas/TeamProjects"
+ },
+ "appliedOn": {
+ "description": "Timestamp of when the custom role was assigned to this team",
+ "example": "1648672018410",
+ "$ref": "#/components/schemas/UnixMillis"
+ }
+ }
+ },
+ "TeamCustomRoles": {
+ "type": "object",
+ "properties": {
+ "totalCount": {
+ "type": "integer",
+ "description": "The number of custom roles assigned to this team",
+ "example": 1
+ },
+ "items": {
+ "type": "array",
+ "description": "An array of the custom roles that have been assigned to this team",
+ "items": {
+ "$ref": "#/components/schemas/TeamCustomRole"
+ }
+ },
+ "_links": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Link"
+ },
+ "description": "The location and content type of related resources",
+ "example": {
+ "self": {
+ "href": "/api/v2/teams/example-team/roles?limit=25",
+ "type": "application/json"
+ }
+ }
+ }
+ }
+ },
+ "TeamImportsRep": {
+ "type": "object",
+ "properties": {
+ "items": {
+ "type": "array",
+ "description": "An array of details about the members requested to be added to this team",
+ "items": {
+ "$ref": "#/components/schemas/MemberImportItem"
+ }
+ }
+ }
+ },
+ "TeamMaintainers": {
+ "type": "object",
+ "properties": {
+ "totalCount": {
+ "type": "integer",
+ "description": "The number of maintainers of the team",
+ "example": 1
+ },
+ "items": {
+ "type": "array",
+ "description": "Details on the members that have been assigned as maintainers of the team",
+ "items": {
+ "$ref": "#/components/schemas/MemberSummary"
+ },
+ "example": [
+ {
+ "_id": "569f183514f4432160000007",
+ "_links": {
+ "self": {
+ "href": "/api/v2/members/569f183514f4432160000007",
+ "type": "application/json"
+ }
+ },
+ "email": "ariel@acme.com",
+ "firstName": "Ariel",
+ "lastName": "Flores",
+ "role": "reader"
+ }
+ ]
+ },
+ "_links": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Link"
+ },
+ "description": "The location and content type of related resources",
+ "example": {
+ "self": {
+ "href": "/api/v2/teams/example-team/maintainers?limit=5",
+ "type": "application/json"
+ }
+ }
+ }
+ }
+ },
+ "TeamMembers": {
+ "type": "object",
+ "properties": {
+ "totalCount": {
+ "type": "integer",
+ "description": "The total count of members that belong to the team",
+ "example": 15
+ }
+ }
+ },
+ "TeamProjects": {
+ "type": "object",
+ "properties": {
+ "totalCount": {
+ "type": "integer",
+ "example": 1
+ },
+ "items": {
+ "type": "array",
+ "description": "Details on each project where team members have write privileges on at least one resource type (e.g. flags)",
+ "items": {
+ "$ref": "#/components/schemas/ProjectSummary"
+ },
+ "example": [
+ {
+ "_links": {
+ "environments": {
+ "href": "/api/v2/projects/example-project/environments",
+ "type": "application/json"
+ },
+ "self": {
+ "href": "/api/v2/projects/example-project",
+ "type": "application/json"
+ }
+ },
+ "key": "project-key-123abc",
+ "name": "Example project"
+ }
+ ]
+ }
+ }
+ },
+ "Teams": {
+ "type": "object",
+ "properties": {
+ "items": {
+ "type": "array",
+ "description": "An array of teams",
+ "items": {
+ "$ref": "#/components/schemas/Team"
+ }
+ },
+ "_links": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Link"
+ },
+ "description": "The location and content type of related resources",
+ "example": {
+ "self": {
+ "href": "/api/v2/teams?expand=maintainers%2Cmembers%2Croles%2Cprojects&limit=20",
+ "type": "application/json"
+ }
+ }
+ },
+ "totalCount": {
+ "type": "integer",
+ "description": "The number of teams",
+ "example": 1
+ }
+ }
+ },
+ "teamPatchInput": {
+ "type": "object",
+ "required": [
+ "instructions"
+ ],
+ "properties": {
+ "comment": {
+ "type": "string",
+ "description": "Optional comment describing the update",
+ "example": "Optional comment about the update"
+ },
+ "instructions": {
+ "description": "The instructions to perform when updating. This should be an array with objects that look like {\"kind\": \"update_action\"}
. Some instructions also require additional parameters as part of this object.",
+ "example": "[ { \"kind\": \"updateDescription\", \"value\": \"New description for the team\" } ]",
+ "$ref": "#/components/schemas/Instructions"
+ }
+ }
+ },
+ "teamsPatchInput": {
+ "type": "object",
+ "required": [
+ "instructions"
+ ],
+ "properties": {
+ "comment": {
+ "type": "string",
+ "description": "Optional comment describing the update",
+ "example": "Optional comment about the update"
+ },
+ "instructions": {
+ "description": "The instructions to perform when updating. This should be an array with objects that look like {\"kind\": \"update_action\"}
. Some instructions also require additional parameters as part of this object.",
+ "example": "[ { \"kind\": \"updateDescription\", \"value\": \"New description for the team\" } ]",
+ "$ref": "#/components/schemas/Instructions"
+ }
+ }
+ },
+ "teamPostInput": {
+ "type": "object",
+ "required": [
+ "key",
+ "name"
+ ],
+ "properties": {
+ "customRoleKeys": {
+ "type": "array",
+ "description": "List of custom role keys the team will access",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "example-role1",
+ "example-role2"
+ ]
+ },
+ "description": {
+ "type": "string",
+ "description": "A description of the team",
+ "example": "An example team"
+ },
+ "key": {
+ "type": "string",
+ "description": "The team key",
+ "example": "team-key-123abc"
+ },
+ "memberIDs": {
+ "type": "array",
+ "description": "A list of member IDs who belong to the team",
+ "items": {
+ "type": "string"
+ },
+ "example": [
+ "12ab3c45de678910fgh12345"
+ ]
+ },
+ "name": {
+ "type": "string",
+ "description": "A human-friendly name for the team",
+ "example": "Example team"
+ },
+ "permissionGrants": {
+ "type": "array",
+ "description": "A list of permission grants. Permission grants allow access to a specific action, without having to create or update a custom role.",
+ "items": {
+ "$ref": "#/components/schemas/permissionGrantInput"
+ }
+ }
+ }
+ },
+ "UnauthorizedErrorRep": {
+ "type": "object",
+ "required": [
+ "code",
+ "message"
+ ],
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Specific error code encountered",
+ "example": "unauthorized"
+ },
+ "message": {
+ "type": "string",
+ "description": "Description of the error",
+ "example": "Invalid access token"
+ }
+ }
+ },
+ "UnixMillis": {
+ "type": "integer",
+ "format": "int64"
+ }
+ }
+ }
+}
\ No newline at end of file
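
Note: the spec file above is the input that the new cmd/resources generator reads through the vendored kin-openapi loader. The following is a rough sketch of that flow, not the generator's actual code; collectOperations is a hypothetical helper and the spec path is assumed, but the loader calls (NewLoader, LoadFromFile, Paths.Map, PathItem.Operations) are real kin-openapi v0.124 APIs.

package main

import (
	"fmt"
	"log"

	"github.com/getkin/kin-openapi/openapi3"
)

// collectOperations is a hypothetical helper: it walks every path item in the
// spec and groups operation IDs by HTTP method, which is roughly the shape of
// data a command-generation template would need.
func collectOperations(spec *openapi3.T) map[string][]string {
	ops := make(map[string][]string)
	for _, pathItem := range spec.Paths.Map() {
		for method, op := range pathItem.Operations() {
			if op.OperationID != "" {
				ops[method] = append(ops[method], op.OperationID)
			}
		}
	}
	return ops
}

func main() {
	loader := openapi3.NewLoader()
	// Load the Teams-only spec committed in this change.
	spec, err := loader.LoadFromFile("ld-teams-openapi.json")
	if err != nil {
		log.Fatal(err)
	}
	for method, ids := range collectOperations(spec) {
		fmt.Println(method, ids)
	}
}
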
diff --git a/tools.go b/tools.go
new file mode 100644
index 00000000..06ab7d0f
--- /dev/null
+++ b/tools.go
@@ -0,0 +1 @@
+package main
diff --git a/vendor/github.com/getkin/kin-openapi/LICENSE b/vendor/github.com/getkin/kin-openapi/LICENSE
new file mode 100644
index 00000000..992b9831
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017-2018 the project authors.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/callback.go b/vendor/github.com/getkin/kin-openapi/openapi3/callback.go
new file mode 100644
index 00000000..13532b15
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/callback.go
@@ -0,0 +1,54 @@
+package openapi3
+
+import (
+ "context"
+ "sort"
+)
+
+// Callback is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#callback-object
+type Callback struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ m map[string]*PathItem
+}
+
+// NewCallback builds a Callback object with path items in insertion order.
+func NewCallback(opts ...NewCallbackOption) *Callback {
+ Callback := NewCallbackWithCapacity(len(opts))
+ for _, opt := range opts {
+ opt(Callback)
+ }
+ return Callback
+}
+
+// NewCallbackOption describes options to NewCallback func
+type NewCallbackOption func(*Callback)
+
+// WithCallback adds Callback as an option to NewCallback
+func WithCallback(cb string, pathItem *PathItem) NewCallbackOption {
+ return func(callback *Callback) {
+ if p := pathItem; p != nil && cb != "" {
+ callback.Set(cb, p)
+ }
+ }
+}
+
+// Validate returns an error if Callback does not comply with the OpenAPI spec.
+func (callback *Callback) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ keys := make([]string, 0, callback.Len())
+ for key := range callback.Map() {
+ keys = append(keys, key)
+ }
+ sort.Strings(keys)
+ for _, key := range keys {
+ v := callback.Value(key)
+ if err := v.Validate(ctx); err != nil {
+ return err
+ }
+ }
+
+ return validateExtensions(ctx, callback.Extensions)
+}
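
For reference, a minimal usage sketch of the functional-options constructor above (illustrative only, not part of this change; the callback expression and the empty path item are made up):

package main

import (
	"context"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// Build a callback with one entry; WithCallback silently ignores empty
	// expressions or nil path items, as the constructor shows.
	item := &openapi3.PathItem{}
	cb := openapi3.NewCallback(
		openapi3.WithCallback("{$request.body#/callbackUrl}", item),
	)

	// Validate walks the entries in sorted key order, then checks extensions.
	if err := cb.Validate(context.Background()); err != nil {
		fmt.Println("invalid callback:", err)
	}
	fmt.Println("callback entries:", cb.Len())
}
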
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/components.go b/vendor/github.com/getkin/kin-openapi/openapi3/components.go
new file mode 100644
index 00000000..656ea193
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/components.go
@@ -0,0 +1,361 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "sort"
+
+ "github.com/go-openapi/jsonpointer"
+)
+
+type (
+ Callbacks map[string]*CallbackRef
+ Examples map[string]*ExampleRef
+ Headers map[string]*HeaderRef
+ Links map[string]*LinkRef
+ ParametersMap map[string]*ParameterRef
+ RequestBodies map[string]*RequestBodyRef
+ ResponseBodies map[string]*ResponseRef
+ Schemas map[string]*SchemaRef
+ SecuritySchemes map[string]*SecuritySchemeRef
+)
+
+// Components is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#components-object
+type Components struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Schemas Schemas `json:"schemas,omitempty" yaml:"schemas,omitempty"`
+ Parameters ParametersMap `json:"parameters,omitempty" yaml:"parameters,omitempty"`
+ Headers Headers `json:"headers,omitempty" yaml:"headers,omitempty"`
+ RequestBodies RequestBodies `json:"requestBodies,omitempty" yaml:"requestBodies,omitempty"`
+ Responses ResponseBodies `json:"responses,omitempty" yaml:"responses,omitempty"`
+ SecuritySchemes SecuritySchemes `json:"securitySchemes,omitempty" yaml:"securitySchemes,omitempty"`
+ Examples Examples `json:"examples,omitempty" yaml:"examples,omitempty"`
+ Links Links `json:"links,omitempty" yaml:"links,omitempty"`
+ Callbacks Callbacks `json:"callbacks,omitempty" yaml:"callbacks,omitempty"`
+}
+
+func NewComponents() Components {
+ return Components{}
+}
+
+// MarshalJSON returns the JSON encoding of Components.
+func (components Components) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 9+len(components.Extensions))
+ for k, v := range components.Extensions {
+ m[k] = v
+ }
+ if x := components.Schemas; len(x) != 0 {
+ m["schemas"] = x
+ }
+ if x := components.Parameters; len(x) != 0 {
+ m["parameters"] = x
+ }
+ if x := components.Headers; len(x) != 0 {
+ m["headers"] = x
+ }
+ if x := components.RequestBodies; len(x) != 0 {
+ m["requestBodies"] = x
+ }
+ if x := components.Responses; len(x) != 0 {
+ m["responses"] = x
+ }
+ if x := components.SecuritySchemes; len(x) != 0 {
+ m["securitySchemes"] = x
+ }
+ if x := components.Examples; len(x) != 0 {
+ m["examples"] = x
+ }
+ if x := components.Links; len(x) != 0 {
+ m["links"] = x
+ }
+ if x := components.Callbacks; len(x) != 0 {
+ m["callbacks"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Components to a copy of data.
+func (components *Components) UnmarshalJSON(data []byte) error {
+ type ComponentsBis Components
+ var x ComponentsBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "schemas")
+ delete(x.Extensions, "parameters")
+ delete(x.Extensions, "headers")
+ delete(x.Extensions, "requestBodies")
+ delete(x.Extensions, "responses")
+ delete(x.Extensions, "securitySchemes")
+ delete(x.Extensions, "examples")
+ delete(x.Extensions, "links")
+ delete(x.Extensions, "callbacks")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *components = Components(x)
+ return nil
+}
+
+// Validate returns an error if Components does not comply with the OpenAPI spec.
+func (components *Components) Validate(ctx context.Context, opts ...ValidationOption) (err error) {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ schemas := make([]string, 0, len(components.Schemas))
+ for name := range components.Schemas {
+ schemas = append(schemas, name)
+ }
+ sort.Strings(schemas)
+ for _, k := range schemas {
+ v := components.Schemas[k]
+ if err = ValidateIdentifier(k); err != nil {
+ return fmt.Errorf("schema %q: %w", k, err)
+ }
+ if err = v.Validate(ctx); err != nil {
+ return fmt.Errorf("schema %q: %w", k, err)
+ }
+ }
+
+ parameters := make([]string, 0, len(components.Parameters))
+ for name := range components.Parameters {
+ parameters = append(parameters, name)
+ }
+ sort.Strings(parameters)
+ for _, k := range parameters {
+ v := components.Parameters[k]
+ if err = ValidateIdentifier(k); err != nil {
+ return fmt.Errorf("parameter %q: %w", k, err)
+ }
+ if err = v.Validate(ctx); err != nil {
+ return fmt.Errorf("parameter %q: %w", k, err)
+ }
+ }
+
+ requestBodies := make([]string, 0, len(components.RequestBodies))
+ for name := range components.RequestBodies {
+ requestBodies = append(requestBodies, name)
+ }
+ sort.Strings(requestBodies)
+ for _, k := range requestBodies {
+ v := components.RequestBodies[k]
+ if err = ValidateIdentifier(k); err != nil {
+ return fmt.Errorf("request body %q: %w", k, err)
+ }
+ if err = v.Validate(ctx); err != nil {
+ return fmt.Errorf("request body %q: %w", k, err)
+ }
+ }
+
+ responses := make([]string, 0, len(components.Responses))
+ for name := range components.Responses {
+ responses = append(responses, name)
+ }
+ sort.Strings(responses)
+ for _, k := range responses {
+ if err = ValidateIdentifier(k); err != nil {
+ return fmt.Errorf("response %q: %w", k, err)
+ }
+ v := components.Responses[k]
+ if err = v.Validate(ctx); err != nil {
+ return fmt.Errorf("response %q: %w", k, err)
+ }
+ }
+
+ headers := make([]string, 0, len(components.Headers))
+ for name := range components.Headers {
+ headers = append(headers, name)
+ }
+ sort.Strings(headers)
+ for _, k := range headers {
+ v := components.Headers[k]
+ if err = ValidateIdentifier(k); err != nil {
+ return fmt.Errorf("header %q: %w", k, err)
+ }
+ if err = v.Validate(ctx); err != nil {
+ return fmt.Errorf("header %q: %w", k, err)
+ }
+ }
+
+ securitySchemes := make([]string, 0, len(components.SecuritySchemes))
+ for name := range components.SecuritySchemes {
+ securitySchemes = append(securitySchemes, name)
+ }
+ sort.Strings(securitySchemes)
+ for _, k := range securitySchemes {
+ v := components.SecuritySchemes[k]
+ if err = ValidateIdentifier(k); err != nil {
+ return fmt.Errorf("security scheme %q: %w", k, err)
+ }
+ if err = v.Validate(ctx); err != nil {
+ return fmt.Errorf("security scheme %q: %w", k, err)
+ }
+ }
+
+ examples := make([]string, 0, len(components.Examples))
+ for name := range components.Examples {
+ examples = append(examples, name)
+ }
+ sort.Strings(examples)
+ for _, k := range examples {
+ v := components.Examples[k]
+ if err = ValidateIdentifier(k); err != nil {
+ return fmt.Errorf("example %q: %w", k, err)
+ }
+ if err = v.Validate(ctx); err != nil {
+ return fmt.Errorf("example %q: %w", k, err)
+ }
+ }
+
+ links := make([]string, 0, len(components.Links))
+ for name := range components.Links {
+ links = append(links, name)
+ }
+ sort.Strings(links)
+ for _, k := range links {
+ v := components.Links[k]
+ if err = ValidateIdentifier(k); err != nil {
+ return fmt.Errorf("link %q: %w", k, err)
+ }
+ if err = v.Validate(ctx); err != nil {
+ return fmt.Errorf("link %q: %w", k, err)
+ }
+ }
+
+ callbacks := make([]string, 0, len(components.Callbacks))
+ for name := range components.Callbacks {
+ callbacks = append(callbacks, name)
+ }
+ sort.Strings(callbacks)
+ for _, k := range callbacks {
+ v := components.Callbacks[k]
+ if err = ValidateIdentifier(k); err != nil {
+ return fmt.Errorf("callback %q: %w", k, err)
+ }
+ if err = v.Validate(ctx); err != nil {
+ return fmt.Errorf("callback %q: %w", k, err)
+ }
+ }
+
+ return validateExtensions(ctx, components.Extensions)
+}
+
+var _ jsonpointer.JSONPointable = (*Schemas)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (m Schemas) JSONLookup(token string) (interface{}, error) {
+ if v, ok := m[token]; !ok || v == nil {
+ return nil, fmt.Errorf("no schema %q", token)
+ } else if ref := v.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ } else {
+ return v.Value, nil
+ }
+}
+
+var _ jsonpointer.JSONPointable = (*ParametersMap)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (m ParametersMap) JSONLookup(token string) (interface{}, error) {
+ if v, ok := m[token]; !ok || v == nil {
+ return nil, fmt.Errorf("no parameter %q", token)
+ } else if ref := v.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ } else {
+ return v.Value, nil
+ }
+}
+
+var _ jsonpointer.JSONPointable = (*Headers)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (m Headers) JSONLookup(token string) (interface{}, error) {
+ if v, ok := m[token]; !ok || v == nil {
+ return nil, fmt.Errorf("no header %q", token)
+ } else if ref := v.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ } else {
+ return v.Value, nil
+ }
+}
+
+var _ jsonpointer.JSONPointable = (*RequestBodyRef)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (m RequestBodies) JSONLookup(token string) (interface{}, error) {
+ if v, ok := m[token]; !ok || v == nil {
+ return nil, fmt.Errorf("no request body %q", token)
+ } else if ref := v.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ } else {
+ return v.Value, nil
+ }
+}
+
+var _ jsonpointer.JSONPointable = (*ResponseRef)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (m ResponseBodies) JSONLookup(token string) (interface{}, error) {
+ if v, ok := m[token]; !ok || v == nil {
+ return nil, fmt.Errorf("no response body %q", token)
+ } else if ref := v.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ } else {
+ return v.Value, nil
+ }
+}
+
+var _ jsonpointer.JSONPointable = (*SecuritySchemes)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (m SecuritySchemes) JSONLookup(token string) (interface{}, error) {
+ if v, ok := m[token]; !ok || v == nil {
+ return nil, fmt.Errorf("no security scheme body %q", token)
+ } else if ref := v.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ } else {
+ return v.Value, nil
+ }
+}
+
+var _ jsonpointer.JSONPointable = (*Examples)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (m Examples) JSONLookup(token string) (interface{}, error) {
+ if v, ok := m[token]; !ok || v == nil {
+ return nil, fmt.Errorf("no example body %q", token)
+ } else if ref := v.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ } else {
+ return v.Value, nil
+ }
+}
+
+var _ jsonpointer.JSONPointable = (*Links)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (m Links) JSONLookup(token string) (interface{}, error) {
+ if v, ok := m[token]; !ok || v == nil {
+ return nil, fmt.Errorf("no link body %q", token)
+ } else if ref := v.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ } else {
+ return v.Value, nil
+ }
+}
+
+var _ jsonpointer.JSONPointable = (*Callbacks)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (m Callbacks) JSONLookup(token string) (interface{}, error) {
+ if v, ok := m[token]; !ok || v == nil {
+ return nil, fmt.Errorf("no callback body %q", token)
+ } else if ref := v.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ } else {
+ return v.Value, nil
+ }
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/contact.go b/vendor/github.com/getkin/kin-openapi/openapi3/contact.go
new file mode 100644
index 00000000..e60d2818
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/contact.go
@@ -0,0 +1,59 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+)
+
+// Contact is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#contact-object
+type Contact struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Name string `json:"name,omitempty" yaml:"name,omitempty"`
+ URL string `json:"url,omitempty" yaml:"url,omitempty"`
+ Email string `json:"email,omitempty" yaml:"email,omitempty"`
+}
+
+// MarshalJSON returns the JSON encoding of Contact.
+func (contact Contact) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 3+len(contact.Extensions))
+ for k, v := range contact.Extensions {
+ m[k] = v
+ }
+ if x := contact.Name; x != "" {
+ m["name"] = x
+ }
+ if x := contact.URL; x != "" {
+ m["url"] = x
+ }
+ if x := contact.Email; x != "" {
+ m["email"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Contact to a copy of data.
+func (contact *Contact) UnmarshalJSON(data []byte) error {
+ type ContactBis Contact
+ var x ContactBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "name")
+ delete(x.Extensions, "url")
+ delete(x.Extensions, "email")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *contact = Contact(x)
+ return nil
+}
+
+// Validate returns an error if Contact does not comply with the OpenAPI spec.
+func (contact *Contact) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ return validateExtensions(ctx, contact.Extensions)
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/content.go b/vendor/github.com/getkin/kin-openapi/openapi3/content.go
new file mode 100644
index 00000000..81b070ee
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/content.go
@@ -0,0 +1,124 @@
+package openapi3
+
+import (
+ "context"
+ "sort"
+ "strings"
+)
+
+// Content is specified by OpenAPI/Swagger 3.0 standard.
+type Content map[string]*MediaType
+
+func NewContent() Content {
+ return make(map[string]*MediaType)
+}
+
+func NewContentWithSchema(schema *Schema, consumes []string) Content {
+ if len(consumes) == 0 {
+ return Content{
+ "*/*": NewMediaType().WithSchema(schema),
+ }
+ }
+ content := make(map[string]*MediaType, len(consumes))
+ for _, mediaType := range consumes {
+ content[mediaType] = NewMediaType().WithSchema(schema)
+ }
+ return content
+}
+
+func NewContentWithSchemaRef(schema *SchemaRef, consumes []string) Content {
+ if len(consumes) == 0 {
+ return Content{
+ "*/*": NewMediaType().WithSchemaRef(schema),
+ }
+ }
+ content := make(map[string]*MediaType, len(consumes))
+ for _, mediaType := range consumes {
+ content[mediaType] = NewMediaType().WithSchemaRef(schema)
+ }
+ return content
+}
+
+func NewContentWithJSONSchema(schema *Schema) Content {
+ return Content{
+ "application/json": NewMediaType().WithSchema(schema),
+ }
+}
+func NewContentWithJSONSchemaRef(schema *SchemaRef) Content {
+ return Content{
+ "application/json": NewMediaType().WithSchemaRef(schema),
+ }
+}
+
+func NewContentWithFormDataSchema(schema *Schema) Content {
+ return Content{
+ "multipart/form-data": NewMediaType().WithSchema(schema),
+ }
+}
+
+func NewContentWithFormDataSchemaRef(schema *SchemaRef) Content {
+ return Content{
+ "multipart/form-data": NewMediaType().WithSchemaRef(schema),
+ }
+}
+
+func (content Content) Get(mime string) *MediaType {
+ // If the mime is empty then short-circuit to the wildcard.
+ // We do this here so that we catch only the specific case of
+	// an empty mime rather than a present, but invalid, mime type.
+ if mime == "" {
+ return content["*/*"]
+ }
+ // Start by making the most specific match possible
+ // by using the mime type in full.
+ if v := content[mime]; v != nil {
+ return v
+ }
+ // If an exact match is not found then we strip all
+ // metadata from the mime type and only use the x/y
+ // portion.
+ i := strings.IndexByte(mime, ';')
+ if i < 0 {
+ // If there is no metadata then preserve the full mime type
+ // string for later wildcard searches.
+ i = len(mime)
+ }
+ mime = mime[:i]
+ if v := content[mime]; v != nil {
+ return v
+ }
+ // If the x/y pattern has no specific match then we
+ // try the x/* pattern.
+ i = strings.IndexByte(mime, '/')
+ if i < 0 {
+ // In the case that the given mime type is not valid because it is
+ // missing the subtype we return nil so that this does not accidentally
+ // resolve with the wildcard.
+ return nil
+ }
+ mime = mime[:i] + "/*"
+ if v := content[mime]; v != nil {
+ return v
+ }
+ // Finally, the most generic match of */* is returned
+ // as a catch-all.
+ return content["*/*"]
+}
+
+// Validate returns an error if Content does not comply with the OpenAPI spec.
+func (content Content) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ keys := make([]string, 0, len(content))
+ for key := range content {
+ keys = append(keys, key)
+ }
+ sort.Strings(keys)
+ for _, k := range keys {
+ v := content[k]
+ if err := v.Validate(ctx); err != nil {
+ return err
+ }
+ }
+ return nil
+}
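For reference, a minimal sketch of how Content.Get resolves a media type, falling back from an exact match to parameter stripping and finally to the */* catch-all. The Content map and the empty Schema below are illustrative only, not taken from the teams spec.

package main

import (
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	content := openapi3.Content{
		"application/json": openapi3.NewMediaType().WithSchema(&openapi3.Schema{}),
		"*/*":              openapi3.NewMediaType().WithSchema(&openapi3.Schema{}),
	}

	fmt.Println(content.Get("application/json") != nil)                // true: exact match
	fmt.Println(content.Get("application/json; charset=utf-8") != nil) // true: parameters are stripped
	fmt.Println(content.Get("text/plain") != nil)                      // true: falls back to */*
	fmt.Println(content.Get("") != nil)                                // true: empty mime short-circuits to */*
}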
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/discriminator.go b/vendor/github.com/getkin/kin-openapi/openapi3/discriminator.go
new file mode 100644
index 00000000..abb48074
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/discriminator.go
@@ -0,0 +1,52 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+)
+
+// Discriminator is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#discriminator-object
+type Discriminator struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ PropertyName string `json:"propertyName" yaml:"propertyName"` // required
+ Mapping map[string]string `json:"mapping,omitempty" yaml:"mapping,omitempty"`
+}
+
+// MarshalJSON returns the JSON encoding of Discriminator.
+func (discriminator Discriminator) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 2+len(discriminator.Extensions))
+ for k, v := range discriminator.Extensions {
+ m[k] = v
+ }
+ m["propertyName"] = discriminator.PropertyName
+ if x := discriminator.Mapping; len(x) != 0 {
+ m["mapping"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Discriminator to a copy of data.
+func (discriminator *Discriminator) UnmarshalJSON(data []byte) error {
+ type DiscriminatorBis Discriminator
+ var x DiscriminatorBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "propertyName")
+ delete(x.Extensions, "mapping")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *discriminator = Discriminator(x)
+ return nil
+}
+
+// Validate returns an error if Discriminator does not comply with the OpenAPI spec.
+func (discriminator *Discriminator) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ return validateExtensions(ctx, discriminator.Extensions)
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/doc.go b/vendor/github.com/getkin/kin-openapi/openapi3/doc.go
new file mode 100644
index 00000000..41c9965c
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/doc.go
@@ -0,0 +1,4 @@
+// Package openapi3 parses and writes OpenAPI 3 specification documents.
+//
+// See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.3.md
+package openapi3
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/encoding.go b/vendor/github.com/getkin/kin-openapi/openapi3/encoding.go
new file mode 100644
index 00000000..8e810279
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/encoding.go
@@ -0,0 +1,139 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "sort"
+)
+
+// Encoding is specified by OpenAPI/Swagger 3.0 standard.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#encoding-object
+type Encoding struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ ContentType string `json:"contentType,omitempty" yaml:"contentType,omitempty"`
+ Headers Headers `json:"headers,omitempty" yaml:"headers,omitempty"`
+ Style string `json:"style,omitempty" yaml:"style,omitempty"`
+ Explode *bool `json:"explode,omitempty" yaml:"explode,omitempty"`
+ AllowReserved bool `json:"allowReserved,omitempty" yaml:"allowReserved,omitempty"`
+}
+
+func NewEncoding() *Encoding {
+ return &Encoding{}
+}
+
+func (encoding *Encoding) WithHeader(name string, header *Header) *Encoding {
+ return encoding.WithHeaderRef(name, &HeaderRef{
+ Value: header,
+ })
+}
+
+func (encoding *Encoding) WithHeaderRef(name string, ref *HeaderRef) *Encoding {
+ headers := encoding.Headers
+ if headers == nil {
+ headers = make(map[string]*HeaderRef)
+ encoding.Headers = headers
+ }
+ headers[name] = ref
+ return encoding
+}
+
+// MarshalJSON returns the JSON encoding of Encoding.
+func (encoding Encoding) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 5+len(encoding.Extensions))
+ for k, v := range encoding.Extensions {
+ m[k] = v
+ }
+ if x := encoding.ContentType; x != "" {
+ m["contentType"] = x
+ }
+ if x := encoding.Headers; len(x) != 0 {
+ m["headers"] = x
+ }
+ if x := encoding.Style; x != "" {
+ m["style"] = x
+ }
+ if x := encoding.Explode; x != nil {
+ m["explode"] = x
+ }
+ if x := encoding.AllowReserved; x {
+ m["allowReserved"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Encoding to a copy of data.
+func (encoding *Encoding) UnmarshalJSON(data []byte) error {
+ type EncodingBis Encoding
+ var x EncodingBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "contentType")
+ delete(x.Extensions, "headers")
+ delete(x.Extensions, "style")
+ delete(x.Extensions, "explode")
+ delete(x.Extensions, "allowReserved")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *encoding = Encoding(x)
+ return nil
+}
+
+// SerializationMethod returns a serialization method of request body.
+// When serialization method is not defined the method returns the default serialization method.
+func (encoding *Encoding) SerializationMethod() *SerializationMethod {
+ sm := &SerializationMethod{Style: SerializationForm, Explode: true}
+ if encoding != nil {
+ if encoding.Style != "" {
+ sm.Style = encoding.Style
+ }
+ if encoding.Explode != nil {
+ sm.Explode = *encoding.Explode
+ }
+ }
+ return sm
+}
+
+// Validate returns an error if Encoding does not comply with the OpenAPI spec.
+func (encoding *Encoding) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if encoding == nil {
+ return nil
+ }
+
+ headers := make([]string, 0, len(encoding.Headers))
+ for k := range encoding.Headers {
+ headers = append(headers, k)
+ }
+ sort.Strings(headers)
+ for _, k := range headers {
+ v := encoding.Headers[k]
+		if err := ValidateIdentifier(k); err != nil {
+			return err
+		}
+		if err := v.Validate(ctx); err != nil {
+			return err
+		}
+ }
+
+	// Validate a media type's serialization method.
+ sm := encoding.SerializationMethod()
+ switch {
+ case sm.Style == SerializationForm && sm.Explode,
+ sm.Style == SerializationForm && !sm.Explode,
+ sm.Style == SerializationSpaceDelimited && sm.Explode,
+ sm.Style == SerializationSpaceDelimited && !sm.Explode,
+ sm.Style == SerializationPipeDelimited && sm.Explode,
+ sm.Style == SerializationPipeDelimited && !sm.Explode,
+ sm.Style == SerializationDeepObject && sm.Explode:
+ default:
+ return fmt.Errorf("serialization method with style=%q and explode=%v is not supported by media type", sm.Style, sm.Explode)
+ }
+
+ return validateExtensions(ctx, encoding.Extensions)
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/errors.go b/vendor/github.com/getkin/kin-openapi/openapi3/errors.go
new file mode 100644
index 00000000..74baab9a
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/errors.go
@@ -0,0 +1,59 @@
+package openapi3
+
+import (
+ "bytes"
+ "errors"
+)
+
+// MultiError is a collection of errors, intended for when
+// multiple issues need to be reported upstream
+type MultiError []error
+
+func (me MultiError) Error() string {
+ return spliceErr(" | ", me)
+}
+
+func spliceErr(sep string, errs []error) string {
+ buff := &bytes.Buffer{}
+ for i, e := range errs {
+ buff.WriteString(e.Error())
+ if i != len(errs)-1 {
+ buff.WriteString(sep)
+ }
+ }
+ return buff.String()
+}
+
+// Is allows you to determine if a generic error is in fact a MultiError using `errors.Is()`.
+// It also returns true if any of the contained errors match target.
+func (me MultiError) Is(target error) bool {
+ if _, ok := target.(MultiError); ok {
+ return true
+ }
+ for _, e := range me {
+ if errors.Is(e, target) {
+ return true
+ }
+ }
+ return false
+}
+
+// As allows you to use `errors.As()` to set target to the first error within the multi error that matches the target type
+func (me MultiError) As(target interface{}) bool {
+ for _, e := range me {
+ if errors.As(e, target) {
+ return true
+ }
+ }
+ return false
+}
+
+type multiErrorForOneOf MultiError
+
+func (meo multiErrorForOneOf) Error() string {
+ return spliceErr(" Or ", meo)
+}
+
+func (meo multiErrorForOneOf) Unwrap() error {
+ return MultiError(meo)
+}
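A minimal sketch of how MultiError interacts with the standard errors package; the wrapped errors here are made up for illustration.

package main

import (
	"errors"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	first := errors.New("schema \"Team\": value of title must be a non-empty string")
	me := openapi3.MultiError{first, errors.New("second problem")}

	// Error() joins the contained messages with " | ".
	fmt.Println(me.Error())

	// errors.Is matches the MultiError itself or any error it contains.
	fmt.Println(errors.Is(me, first)) // true
}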
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/example.go b/vendor/github.com/getkin/kin-openapi/openapi3/example.go
new file mode 100644
index 00000000..44e71d82
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/example.go
@@ -0,0 +1,76 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+)
+
+// Example is specified by OpenAPI/Swagger 3.0 standard.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#example-object
+type Example struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Summary string `json:"summary,omitempty" yaml:"summary,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Value interface{} `json:"value,omitempty" yaml:"value,omitempty"`
+ ExternalValue string `json:"externalValue,omitempty" yaml:"externalValue,omitempty"`
+}
+
+func NewExample(value interface{}) *Example {
+ return &Example{Value: value}
+}
+
+// MarshalJSON returns the JSON encoding of Example.
+func (example Example) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 4+len(example.Extensions))
+ for k, v := range example.Extensions {
+ m[k] = v
+ }
+ if x := example.Summary; x != "" {
+ m["summary"] = x
+ }
+ if x := example.Description; x != "" {
+ m["description"] = x
+ }
+ if x := example.Value; x != nil {
+ m["value"] = x
+ }
+ if x := example.ExternalValue; x != "" {
+ m["externalValue"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Example to a copy of data.
+func (example *Example) UnmarshalJSON(data []byte) error {
+ type ExampleBis Example
+ var x ExampleBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "summary")
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "value")
+ delete(x.Extensions, "externalValue")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *example = Example(x)
+ return nil
+}
+
+// Validate returns an error if Example does not comply with the OpenAPI spec.
+func (example *Example) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if example.Value != nil && example.ExternalValue != "" {
+ return errors.New("value and externalValue are mutually exclusive")
+ }
+ if example.Value == nil && example.ExternalValue == "" {
+ return errors.New("no value or externalValue field")
+ }
+
+ return validateExtensions(ctx, example.Extensions)
+}
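A small sketch of the value/externalValue rule that Example.Validate enforces; the payload is hypothetical.

package main

import (
	"context"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// Exactly one of value and externalValue must be set.
	ok := openapi3.NewExample(map[string]interface{}{"name": "example-team"})
	fmt.Println(ok.Validate(context.Background())) // <nil>

	empty := &openapi3.Example{}
	fmt.Println(empty.Validate(context.Background())) // no value or externalValue field
}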
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/example_validation.go b/vendor/github.com/getkin/kin-openapi/openapi3/example_validation.go
new file mode 100644
index 00000000..fb7a1da1
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/example_validation.go
@@ -0,0 +1,16 @@
+package openapi3
+
+import "context"
+
+func validateExampleValue(ctx context.Context, input interface{}, schema *Schema) error {
+ opts := make([]SchemaValidationOption, 0, 2)
+
+ if vo := getValidationOptions(ctx); vo.examplesValidationAsReq {
+ opts = append(opts, VisitAsRequest())
+ } else if vo.examplesValidationAsRes {
+ opts = append(opts, VisitAsResponse())
+ }
+ opts = append(opts, MultiErrors())
+
+ return schema.VisitJSON(input, opts...)
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/extension.go b/vendor/github.com/getkin/kin-openapi/openapi3/extension.go
new file mode 100644
index 00000000..37f6b01e
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/extension.go
@@ -0,0 +1,32 @@
+package openapi3
+
+import (
+ "context"
+ "fmt"
+ "sort"
+ "strings"
+)
+
+func validateExtensions(ctx context.Context, extensions map[string]interface{}) error { // FIXME: newtype + Validate(...)
+ allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed
+
+ var unknowns []string
+ for k := range extensions {
+ if strings.HasPrefix(k, "x-") {
+ continue
+ }
+ if allowed != nil {
+ if _, ok := allowed[k]; ok {
+ continue
+ }
+ }
+ unknowns = append(unknowns, k)
+ }
+
+ if len(unknowns) != 0 {
+ sort.Strings(unknowns)
+ return fmt.Errorf("extra sibling fields: %+v", unknowns)
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/external_docs.go b/vendor/github.com/getkin/kin-openapi/openapi3/external_docs.go
new file mode 100644
index 00000000..7190be4b
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/external_docs.go
@@ -0,0 +1,64 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/url"
+)
+
+// ExternalDocs is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#external-documentation-object
+type ExternalDocs struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ URL string `json:"url,omitempty" yaml:"url,omitempty"`
+}
+
+// MarshalJSON returns the JSON encoding of ExternalDocs.
+func (e ExternalDocs) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 2+len(e.Extensions))
+ for k, v := range e.Extensions {
+ m[k] = v
+ }
+ if x := e.Description; x != "" {
+ m["description"] = x
+ }
+ if x := e.URL; x != "" {
+ m["url"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets ExternalDocs to a copy of data.
+func (e *ExternalDocs) UnmarshalJSON(data []byte) error {
+ type ExternalDocsBis ExternalDocs
+ var x ExternalDocsBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "url")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *e = ExternalDocs(x)
+ return nil
+}
+
+// Validate returns an error if ExternalDocs does not comply with the OpenAPI spec.
+func (e *ExternalDocs) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if e.URL == "" {
+ return errors.New("url is required")
+ }
+ if _, err := url.Parse(e.URL); err != nil {
+ return fmt.Errorf("url is incorrect: %w", err)
+ }
+
+ return validateExtensions(ctx, e.Extensions)
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/header.go b/vendor/github.com/getkin/kin-openapi/openapi3/header.go
new file mode 100644
index 00000000..e5eee6cc
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/header.go
@@ -0,0 +1,96 @@
+package openapi3
+
+import (
+ "context"
+ "errors"
+ "fmt"
+
+ "github.com/go-openapi/jsonpointer"
+)
+
+// Header is specified by OpenAPI/Swagger 3.0 standard.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#header-object
+type Header struct {
+ Parameter
+}
+
+var _ jsonpointer.JSONPointable = (*Header)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (header Header) JSONLookup(token string) (interface{}, error) {
+ return header.Parameter.JSONLookup(token)
+}
+
+// MarshalJSON returns the JSON encoding of Header.
+func (header Header) MarshalJSON() ([]byte, error) {
+ return header.Parameter.MarshalJSON()
+}
+
+// UnmarshalJSON sets Header to a copy of data.
+func (header *Header) UnmarshalJSON(data []byte) error {
+ return header.Parameter.UnmarshalJSON(data)
+}
+
+// MarshalYAML returns the YAML encoding of Header.
+func (header Header) MarshalYAML() (interface{}, error) {
+ return header.Parameter, nil
+}
+
+// SerializationMethod returns a header's serialization method.
+func (header *Header) SerializationMethod() (*SerializationMethod, error) {
+ style := header.Style
+ if style == "" {
+ style = SerializationSimple
+ }
+ explode := false
+ if header.Explode != nil {
+ explode = *header.Explode
+ }
+ return &SerializationMethod{Style: style, Explode: explode}, nil
+}
+
+// Validate returns an error if Header does not comply with the OpenAPI spec.
+func (header *Header) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if header.Name != "" {
+ return errors.New("header 'name' MUST NOT be specified, it is given in the corresponding headers map")
+ }
+ if header.In != "" {
+ return errors.New("header 'in' MUST NOT be specified, it is implicitly in header")
+ }
+
+ // Validate a parameter's serialization method.
+ sm, err := header.SerializationMethod()
+ if err != nil {
+ return err
+ }
+ if smSupported := false ||
+ sm.Style == SerializationSimple && !sm.Explode ||
+ sm.Style == SerializationSimple && sm.Explode; !smSupported {
+ e := fmt.Errorf("serialization method with style=%q and explode=%v is not supported by a header parameter", sm.Style, sm.Explode)
+ return fmt.Errorf("header schema is invalid: %w", e)
+ }
+
+ if (header.Schema == nil) == (len(header.Content) == 0) {
+ e := fmt.Errorf("parameter must contain exactly one of content and schema: %v", header)
+ return fmt.Errorf("header schema is invalid: %w", e)
+ }
+ if schema := header.Schema; schema != nil {
+ if err := schema.Validate(ctx); err != nil {
+ return fmt.Errorf("header schema is invalid: %w", err)
+ }
+ }
+
+ if content := header.Content; content != nil {
+ e := errors.New("parameter content must only contain one entry")
+ if len(content) > 1 {
+ return fmt.Errorf("header content is invalid: %w", e)
+ }
+
+ if err := content.Validate(ctx); err != nil {
+ return fmt.Errorf("header content is invalid: %w", err)
+ }
+ }
+ return nil
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/helpers.go b/vendor/github.com/getkin/kin-openapi/openapi3/helpers.go
new file mode 100644
index 00000000..d160eb1e
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/helpers.go
@@ -0,0 +1,41 @@
+package openapi3
+
+import (
+ "fmt"
+ "regexp"
+)
+
+const identifierPattern = `^[a-zA-Z0-9._-]+$`
+
+// IdentifierRegExp verifies whether a Components object key matches the 'identifierPattern' pattern, according to OpenAPI v3.x.
+// However, to support legacy OpenAPI v2.x documents, the pattern above may need to be customized so that
+// converted v2-to-v3 specs do not fail validation.
+var IdentifierRegExp = regexp.MustCompile(identifierPattern)
+
+// ValidateIdentifier returns an error if the given component name does not match IdentifierRegExp.
+func ValidateIdentifier(value string) error {
+ if IdentifierRegExp.MatchString(value) {
+ return nil
+ }
+ return fmt.Errorf("identifier %q is not supported by OpenAPIv3 standard (regexp: %q)", value, identifierPattern)
+}
+
+// Float64Ptr is a helper for defining OpenAPI schemas.
+func Float64Ptr(value float64) *float64 {
+ return &value
+}
+
+// BoolPtr is a helper for defining OpenAPI schemas.
+func BoolPtr(value bool) *bool {
+ return &value
+}
+
+// Int64Ptr is a helper for defining OpenAPI schemas.
+func Int64Ptr(value int64) *int64 {
+ return &value
+}
+
+// Uint64Ptr is a helper for defining OpenAPI schemas.
+func Uint64Ptr(value uint64) *uint64 {
+ return &value
+}
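A quick sketch of ValidateIdentifier against identifierPattern; the component names are hypothetical.

package main

import (
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// Component keys may only contain letters, digits, '.', '_' and '-'.
	fmt.Println(openapi3.ValidateIdentifier("TeamPostInput")) // <nil>
	fmt.Println(openapi3.ValidateIdentifier("team post"))     // identifier "team post" is not supported ...
}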
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/info.go b/vendor/github.com/getkin/kin-openapi/openapi3/info.go
new file mode 100644
index 00000000..ffcd3b0e
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/info.go
@@ -0,0 +1,91 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+)
+
+// Info is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#info-object
+type Info struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Title string `json:"title" yaml:"title"` // Required
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ TermsOfService string `json:"termsOfService,omitempty" yaml:"termsOfService,omitempty"`
+ Contact *Contact `json:"contact,omitempty" yaml:"contact,omitempty"`
+ License *License `json:"license,omitempty" yaml:"license,omitempty"`
+ Version string `json:"version" yaml:"version"` // Required
+}
+
+// MarshalJSON returns the JSON encoding of Info.
+func (info Info) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 6+len(info.Extensions))
+ for k, v := range info.Extensions {
+ m[k] = v
+ }
+ m["title"] = info.Title
+ if x := info.Description; x != "" {
+ m["description"] = x
+ }
+ if x := info.TermsOfService; x != "" {
+ m["termsOfService"] = x
+ }
+ if x := info.Contact; x != nil {
+ m["contact"] = x
+ }
+ if x := info.License; x != nil {
+ m["license"] = x
+ }
+ m["version"] = info.Version
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Info to a copy of data.
+func (info *Info) UnmarshalJSON(data []byte) error {
+ type InfoBis Info
+ var x InfoBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "title")
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "termsOfService")
+ delete(x.Extensions, "contact")
+ delete(x.Extensions, "license")
+ delete(x.Extensions, "version")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *info = Info(x)
+ return nil
+}
+
+// Validate returns an error if Info does not comply with the OpenAPI spec.
+func (info *Info) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if contact := info.Contact; contact != nil {
+ if err := contact.Validate(ctx); err != nil {
+ return err
+ }
+ }
+
+ if license := info.License; license != nil {
+ if err := license.Validate(ctx); err != nil {
+ return err
+ }
+ }
+
+ if info.Version == "" {
+ return errors.New("value of version must be a non-empty string")
+ }
+
+ if info.Title == "" {
+ return errors.New("value of title must be a non-empty string")
+ }
+
+ return validateExtensions(ctx, info.Extensions)
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/internalize_refs.go b/vendor/github.com/getkin/kin-openapi/openapi3/internalize_refs.go
new file mode 100644
index 00000000..e313e553
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/internalize_refs.go
@@ -0,0 +1,443 @@
+package openapi3
+
+import (
+ "context"
+ "path/filepath"
+ "strings"
+)
+
+type RefNameResolver func(string) string
+
+// DefaultRefNameResolver is a default implementation of refNameResolver for the
+// InternalizeRefs function.
+//
+// If a reference points to an element inside a document, it returns the last
+// element in the reference using filepath.Base. Otherwise if the reference points
+// to a file, it returns the file name trimmed of all extensions.
+func DefaultRefNameResolver(ref string) string {
+ if ref == "" {
+ return ""
+ }
+ split := strings.SplitN(ref, "#", 2)
+ if len(split) == 2 {
+ return filepath.Base(split[1])
+ }
+ ref = split[0]
+ for ext := filepath.Ext(ref); len(ext) > 0; ext = filepath.Ext(ref) {
+ ref = strings.TrimSuffix(ref, ext)
+ }
+ return filepath.Base(ref)
+}
+
+func schemaNames(s Schemas) []string {
+ out := make([]string, 0, len(s))
+ for i := range s {
+ out = append(out, i)
+ }
+ return out
+}
+
+func parametersMapNames(s ParametersMap) []string {
+ out := make([]string, 0, len(s))
+ for i := range s {
+ out = append(out, i)
+ }
+ return out
+}
+
+func isExternalRef(ref string, parentIsExternal bool) bool {
+ return ref != "" && (!strings.HasPrefix(ref, "#/components/") || parentIsExternal)
+}
+
+func (doc *T) addSchemaToSpec(s *SchemaRef, refNameResolver RefNameResolver, parentIsExternal bool) bool {
+ if s == nil || !isExternalRef(s.Ref, parentIsExternal) {
+ return false
+ }
+
+ name := refNameResolver(s.Ref)
+ if doc.Components != nil {
+ if _, ok := doc.Components.Schemas[name]; ok {
+ s.Ref = "#/components/schemas/" + name
+ return true
+ }
+ }
+
+ if doc.Components == nil {
+ doc.Components = &Components{}
+ }
+ if doc.Components.Schemas == nil {
+ doc.Components.Schemas = make(Schemas)
+ }
+ doc.Components.Schemas[name] = s.Value.NewRef()
+ s.Ref = "#/components/schemas/" + name
+ return true
+}
+
+func (doc *T) addParameterToSpec(p *ParameterRef, refNameResolver RefNameResolver, parentIsExternal bool) bool {
+ if p == nil || !isExternalRef(p.Ref, parentIsExternal) {
+ return false
+ }
+ name := refNameResolver(p.Ref)
+ if doc.Components != nil {
+ if _, ok := doc.Components.Parameters[name]; ok {
+ p.Ref = "#/components/parameters/" + name
+ return true
+ }
+ }
+
+ if doc.Components == nil {
+ doc.Components = &Components{}
+ }
+ if doc.Components.Parameters == nil {
+ doc.Components.Parameters = make(ParametersMap)
+ }
+ doc.Components.Parameters[name] = &ParameterRef{Value: p.Value}
+ p.Ref = "#/components/parameters/" + name
+ return true
+}
+
+func (doc *T) addHeaderToSpec(h *HeaderRef, refNameResolver RefNameResolver, parentIsExternal bool) bool {
+ if h == nil || !isExternalRef(h.Ref, parentIsExternal) {
+ return false
+ }
+ name := refNameResolver(h.Ref)
+ if doc.Components != nil {
+ if _, ok := doc.Components.Headers[name]; ok {
+ h.Ref = "#/components/headers/" + name
+ return true
+ }
+ }
+
+ if doc.Components == nil {
+ doc.Components = &Components{}
+ }
+ if doc.Components.Headers == nil {
+ doc.Components.Headers = make(Headers)
+ }
+ doc.Components.Headers[name] = &HeaderRef{Value: h.Value}
+ h.Ref = "#/components/headers/" + name
+ return true
+}
+
+func (doc *T) addRequestBodyToSpec(r *RequestBodyRef, refNameResolver RefNameResolver, parentIsExternal bool) bool {
+ if r == nil || !isExternalRef(r.Ref, parentIsExternal) {
+ return false
+ }
+ name := refNameResolver(r.Ref)
+ if doc.Components != nil {
+ if _, ok := doc.Components.RequestBodies[name]; ok {
+ r.Ref = "#/components/requestBodies/" + name
+ return true
+ }
+ }
+
+ if doc.Components == nil {
+ doc.Components = &Components{}
+ }
+ if doc.Components.RequestBodies == nil {
+ doc.Components.RequestBodies = make(RequestBodies)
+ }
+ doc.Components.RequestBodies[name] = &RequestBodyRef{Value: r.Value}
+ r.Ref = "#/components/requestBodies/" + name
+ return true
+}
+
+func (doc *T) addResponseToSpec(r *ResponseRef, refNameResolver RefNameResolver, parentIsExternal bool) bool {
+ if r == nil || !isExternalRef(r.Ref, parentIsExternal) {
+ return false
+ }
+ name := refNameResolver(r.Ref)
+ if doc.Components != nil {
+ if _, ok := doc.Components.Responses[name]; ok {
+ r.Ref = "#/components/responses/" + name
+ return true
+ }
+ }
+
+ if doc.Components == nil {
+ doc.Components = &Components{}
+ }
+ if doc.Components.Responses == nil {
+ doc.Components.Responses = make(ResponseBodies)
+ }
+ doc.Components.Responses[name] = &ResponseRef{Value: r.Value}
+ r.Ref = "#/components/responses/" + name
+ return true
+}
+
+func (doc *T) addSecuritySchemeToSpec(ss *SecuritySchemeRef, refNameResolver RefNameResolver, parentIsExternal bool) {
+ if ss == nil || !isExternalRef(ss.Ref, parentIsExternal) {
+ return
+ }
+ name := refNameResolver(ss.Ref)
+ if doc.Components != nil {
+ if _, ok := doc.Components.SecuritySchemes[name]; ok {
+ ss.Ref = "#/components/securitySchemes/" + name
+ return
+ }
+ }
+
+ if doc.Components == nil {
+ doc.Components = &Components{}
+ }
+ if doc.Components.SecuritySchemes == nil {
+ doc.Components.SecuritySchemes = make(SecuritySchemes)
+ }
+ doc.Components.SecuritySchemes[name] = &SecuritySchemeRef{Value: ss.Value}
+ ss.Ref = "#/components/securitySchemes/" + name
+
+}
+
+func (doc *T) addExampleToSpec(e *ExampleRef, refNameResolver RefNameResolver, parentIsExternal bool) {
+ if e == nil || !isExternalRef(e.Ref, parentIsExternal) {
+ return
+ }
+ name := refNameResolver(e.Ref)
+ if doc.Components != nil {
+ if _, ok := doc.Components.Examples[name]; ok {
+ e.Ref = "#/components/examples/" + name
+ return
+ }
+ }
+
+ if doc.Components == nil {
+ doc.Components = &Components{}
+ }
+ if doc.Components.Examples == nil {
+ doc.Components.Examples = make(Examples)
+ }
+ doc.Components.Examples[name] = &ExampleRef{Value: e.Value}
+ e.Ref = "#/components/examples/" + name
+
+}
+
+func (doc *T) addLinkToSpec(l *LinkRef, refNameResolver RefNameResolver, parentIsExternal bool) {
+ if l == nil || !isExternalRef(l.Ref, parentIsExternal) {
+ return
+ }
+ name := refNameResolver(l.Ref)
+ if doc.Components != nil {
+ if _, ok := doc.Components.Links[name]; ok {
+ l.Ref = "#/components/links/" + name
+ return
+ }
+ }
+
+ if doc.Components == nil {
+ doc.Components = &Components{}
+ }
+ if doc.Components.Links == nil {
+ doc.Components.Links = make(Links)
+ }
+ doc.Components.Links[name] = &LinkRef{Value: l.Value}
+ l.Ref = "#/components/links/" + name
+
+}
+
+func (doc *T) addCallbackToSpec(c *CallbackRef, refNameResolver RefNameResolver, parentIsExternal bool) bool {
+ if c == nil || !isExternalRef(c.Ref, parentIsExternal) {
+ return false
+ }
+ name := refNameResolver(c.Ref)
+
+ if doc.Components == nil {
+ doc.Components = &Components{}
+ }
+ if doc.Components.Callbacks == nil {
+ doc.Components.Callbacks = make(Callbacks)
+ }
+ c.Ref = "#/components/callbacks/" + name
+ doc.Components.Callbacks[name] = &CallbackRef{Value: c.Value}
+ return true
+}
+
+func (doc *T) derefSchema(s *Schema, refNameResolver RefNameResolver, parentIsExternal bool) {
+ if s == nil || doc.isVisitedSchema(s) {
+ return
+ }
+
+ for _, list := range []SchemaRefs{s.AllOf, s.AnyOf, s.OneOf} {
+ for _, s2 := range list {
+ isExternal := doc.addSchemaToSpec(s2, refNameResolver, parentIsExternal)
+ if s2 != nil {
+ doc.derefSchema(s2.Value, refNameResolver, isExternal || parentIsExternal)
+ }
+ }
+ }
+ for _, s2 := range s.Properties {
+ isExternal := doc.addSchemaToSpec(s2, refNameResolver, parentIsExternal)
+ if s2 != nil {
+ doc.derefSchema(s2.Value, refNameResolver, isExternal || parentIsExternal)
+ }
+ }
+ for _, ref := range []*SchemaRef{s.Not, s.AdditionalProperties.Schema, s.Items} {
+ isExternal := doc.addSchemaToSpec(ref, refNameResolver, parentIsExternal)
+ if ref != nil {
+ doc.derefSchema(ref.Value, refNameResolver, isExternal || parentIsExternal)
+ }
+ }
+}
+
+func (doc *T) derefHeaders(hs Headers, refNameResolver RefNameResolver, parentIsExternal bool) {
+ for _, h := range hs {
+ isExternal := doc.addHeaderToSpec(h, refNameResolver, parentIsExternal)
+ if doc.isVisitedHeader(h.Value) {
+ continue
+ }
+ doc.derefParameter(h.Value.Parameter, refNameResolver, parentIsExternal || isExternal)
+ }
+}
+
+func (doc *T) derefExamples(es Examples, refNameResolver RefNameResolver, parentIsExternal bool) {
+ for _, e := range es {
+ doc.addExampleToSpec(e, refNameResolver, parentIsExternal)
+ }
+}
+
+func (doc *T) derefContent(c Content, refNameResolver RefNameResolver, parentIsExternal bool) {
+ for _, mediatype := range c {
+ isExternal := doc.addSchemaToSpec(mediatype.Schema, refNameResolver, parentIsExternal)
+ if mediatype.Schema != nil {
+ doc.derefSchema(mediatype.Schema.Value, refNameResolver, isExternal || parentIsExternal)
+ }
+ doc.derefExamples(mediatype.Examples, refNameResolver, parentIsExternal)
+ for _, e := range mediatype.Encoding {
+ doc.derefHeaders(e.Headers, refNameResolver, parentIsExternal)
+ }
+ }
+}
+
+func (doc *T) derefLinks(ls Links, refNameResolver RefNameResolver, parentIsExternal bool) {
+ for _, l := range ls {
+ doc.addLinkToSpec(l, refNameResolver, parentIsExternal)
+ }
+}
+
+func (doc *T) derefResponse(r *ResponseRef, refNameResolver RefNameResolver, parentIsExternal bool) {
+ isExternal := doc.addResponseToSpec(r, refNameResolver, parentIsExternal)
+ if v := r.Value; v != nil {
+ doc.derefHeaders(v.Headers, refNameResolver, isExternal || parentIsExternal)
+ doc.derefContent(v.Content, refNameResolver, isExternal || parentIsExternal)
+ doc.derefLinks(v.Links, refNameResolver, isExternal || parentIsExternal)
+ }
+}
+
+func (doc *T) derefResponses(rs *Responses, refNameResolver RefNameResolver, parentIsExternal bool) {
+ doc.derefResponseBodies(rs.Map(), refNameResolver, parentIsExternal)
+}
+
+func (doc *T) derefResponseBodies(es ResponseBodies, refNameResolver RefNameResolver, parentIsExternal bool) {
+ for _, e := range es {
+ doc.derefResponse(e, refNameResolver, parentIsExternal)
+ }
+}
+
+func (doc *T) derefParameter(p Parameter, refNameResolver RefNameResolver, parentIsExternal bool) {
+ isExternal := doc.addSchemaToSpec(p.Schema, refNameResolver, parentIsExternal)
+ doc.derefContent(p.Content, refNameResolver, parentIsExternal)
+ if p.Schema != nil {
+ doc.derefSchema(p.Schema.Value, refNameResolver, isExternal || parentIsExternal)
+ }
+}
+
+func (doc *T) derefRequestBody(r RequestBody, refNameResolver RefNameResolver, parentIsExternal bool) {
+ doc.derefContent(r.Content, refNameResolver, parentIsExternal)
+}
+
+func (doc *T) derefPaths(paths map[string]*PathItem, refNameResolver RefNameResolver, parentIsExternal bool) {
+ for _, ops := range paths {
+ pathIsExternal := isExternalRef(ops.Ref, parentIsExternal)
+ // inline full operations
+ ops.Ref = ""
+
+ for _, param := range ops.Parameters {
+ doc.addParameterToSpec(param, refNameResolver, pathIsExternal)
+ }
+
+ for _, op := range ops.Operations() {
+ isExternal := doc.addRequestBodyToSpec(op.RequestBody, refNameResolver, pathIsExternal)
+ if op.RequestBody != nil && op.RequestBody.Value != nil {
+ doc.derefRequestBody(*op.RequestBody.Value, refNameResolver, pathIsExternal || isExternal)
+ }
+ for _, cb := range op.Callbacks {
+ isExternal := doc.addCallbackToSpec(cb, refNameResolver, pathIsExternal)
+ if cb.Value != nil {
+ cbValue := (*cb.Value).Map()
+ doc.derefPaths(cbValue, refNameResolver, pathIsExternal || isExternal)
+ }
+ }
+ doc.derefResponses(op.Responses, refNameResolver, pathIsExternal)
+ for _, param := range op.Parameters {
+ isExternal := doc.addParameterToSpec(param, refNameResolver, pathIsExternal)
+ if param.Value != nil {
+ doc.derefParameter(*param.Value, refNameResolver, pathIsExternal || isExternal)
+ }
+ }
+ }
+ }
+}
+
+// InternalizeRefs removes all references to external files from the spec and moves them
+// to the components section.
+//
+// refNameResolver takes in a reference and returns a name to store the reference under locally.
+// It MUST return a unique name for each reference type.
+// A default implementation is provided that will suffice for most use cases. See the function
+// documentation for more details.
+//
+// Example:
+//
+// doc.InternalizeRefs(context.Background(), nil)
+func (doc *T) InternalizeRefs(ctx context.Context, refNameResolver func(ref string) string) {
+ doc.resetVisited()
+
+ if refNameResolver == nil {
+ refNameResolver = DefaultRefNameResolver
+ }
+
+ if components := doc.Components; components != nil {
+ names := schemaNames(components.Schemas)
+ for _, name := range names {
+ schema := components.Schemas[name]
+ isExternal := doc.addSchemaToSpec(schema, refNameResolver, false)
+ if schema != nil {
+ schema.Ref = "" // always dereference the top level
+ doc.derefSchema(schema.Value, refNameResolver, isExternal)
+ }
+ }
+ names = parametersMapNames(components.Parameters)
+ for _, name := range names {
+ p := components.Parameters[name]
+ isExternal := doc.addParameterToSpec(p, refNameResolver, false)
+ if p != nil && p.Value != nil {
+ p.Ref = "" // always dereference the top level
+ doc.derefParameter(*p.Value, refNameResolver, isExternal)
+ }
+ }
+ doc.derefHeaders(components.Headers, refNameResolver, false)
+ for _, req := range components.RequestBodies {
+ isExternal := doc.addRequestBodyToSpec(req, refNameResolver, false)
+ if req != nil && req.Value != nil {
+ req.Ref = "" // always dereference the top level
+ doc.derefRequestBody(*req.Value, refNameResolver, isExternal)
+ }
+ }
+ doc.derefResponseBodies(components.Responses, refNameResolver, false)
+ for _, ss := range components.SecuritySchemes {
+ doc.addSecuritySchemeToSpec(ss, refNameResolver, false)
+ }
+ doc.derefExamples(components.Examples, refNameResolver, false)
+ doc.derefLinks(components.Links, refNameResolver, false)
+
+ for _, cb := range components.Callbacks {
+ isExternal := doc.addCallbackToSpec(cb, refNameResolver, false)
+ if cb != nil && cb.Value != nil {
+ cb.Ref = "" // always dereference the top level
+ cbValue := (*cb.Value).Map()
+ doc.derefPaths(cbValue, refNameResolver, isExternal)
+ }
+ }
+ }
+
+ doc.derefPaths(doc.Paths.Map(), refNameResolver, false)
+}
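A small sketch of how DefaultRefNameResolver names references when InternalizeRefs moves them into components; the ref strings are illustrative.

package main

import (
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// In-document refs resolve to the last element of the fragment.
	fmt.Println(openapi3.DefaultRefNameResolver("#/components/schemas/Team")) // Team

	// File refs resolve to the file name with every extension trimmed.
	fmt.Println(openapi3.DefaultRefNameResolver("./schemas/team.v1.yaml")) // team
}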
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/license.go b/vendor/github.com/getkin/kin-openapi/openapi3/license.go
new file mode 100644
index 00000000..3d2d2f06
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/license.go
@@ -0,0 +1,57 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+)
+
+// License is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#license-object
+type License struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Name string `json:"name" yaml:"name"` // Required
+ URL string `json:"url,omitempty" yaml:"url,omitempty"`
+}
+
+// MarshalJSON returns the JSON encoding of License.
+func (license License) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 2+len(license.Extensions))
+ for k, v := range license.Extensions {
+ m[k] = v
+ }
+ m["name"] = license.Name
+ if x := license.URL; x != "" {
+ m["url"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets License to a copy of data.
+func (license *License) UnmarshalJSON(data []byte) error {
+ type LicenseBis License
+ var x LicenseBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "name")
+ delete(x.Extensions, "url")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *license = License(x)
+ return nil
+}
+
+// Validate returns an error if License does not comply with the OpenAPI spec.
+func (license *License) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if license.Name == "" {
+ return errors.New("value of license name must be a non-empty string")
+ }
+
+ return validateExtensions(ctx, license.Extensions)
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/link.go b/vendor/github.com/getkin/kin-openapi/openapi3/link.go
new file mode 100644
index 00000000..23a8df41
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/link.go
@@ -0,0 +1,85 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+)
+
+// Link is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#link-object
+type Link struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ OperationRef string `json:"operationRef,omitempty" yaml:"operationRef,omitempty"`
+ OperationID string `json:"operationId,omitempty" yaml:"operationId,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Parameters map[string]interface{} `json:"parameters,omitempty" yaml:"parameters,omitempty"`
+ Server *Server `json:"server,omitempty" yaml:"server,omitempty"`
+ RequestBody interface{} `json:"requestBody,omitempty" yaml:"requestBody,omitempty"`
+}
+
+// MarshalJSON returns the JSON encoding of Link.
+func (link Link) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 6+len(link.Extensions))
+ for k, v := range link.Extensions {
+ m[k] = v
+ }
+
+ if x := link.OperationRef; x != "" {
+ m["operationRef"] = x
+ }
+ if x := link.OperationID; x != "" {
+ m["operationId"] = x
+ }
+ if x := link.Description; x != "" {
+ m["description"] = x
+ }
+ if x := link.Parameters; len(x) != 0 {
+ m["parameters"] = x
+ }
+ if x := link.Server; x != nil {
+ m["server"] = x
+ }
+ if x := link.RequestBody; x != nil {
+ m["requestBody"] = x
+ }
+
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Link to a copy of data.
+func (link *Link) UnmarshalJSON(data []byte) error {
+ type LinkBis Link
+ var x LinkBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "operationRef")
+ delete(x.Extensions, "operationId")
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "parameters")
+ delete(x.Extensions, "server")
+ delete(x.Extensions, "requestBody")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *link = Link(x)
+ return nil
+}
+
+// Validate returns an error if Link does not comply with the OpenAPI spec.
+func (link *Link) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if link.OperationID == "" && link.OperationRef == "" {
+ return errors.New("missing operationId or operationRef on link")
+ }
+ if link.OperationID != "" && link.OperationRef != "" {
+ return fmt.Errorf("operationId %q and operationRef %q are mutually exclusive", link.OperationID, link.OperationRef)
+ }
+
+ return validateExtensions(ctx, link.Extensions)
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/loader.go b/vendor/github.com/getkin/kin-openapi/openapi3/loader.go
new file mode 100644
index 00000000..88ab566a
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/loader.go
@@ -0,0 +1,1118 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "net/url"
+ "os"
+ "path"
+ "path/filepath"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+)
+
+var CircularReferenceError = "kin-openapi bug found: circular schema reference not handled"
+var CircularReferenceCounter = 3
+
+func foundUnresolvedRef(ref string) error {
+ return fmt.Errorf("found unresolved ref: %q", ref)
+}
+
+func failedToResolveRefFragmentPart(value, what string) error {
+ return fmt.Errorf("failed to resolve %q in fragment in URI: %q", what, value)
+}
+
+// Loader helps deserialize an OpenAPIv3 document
+type Loader struct {
+ // IsExternalRefsAllowed enables visiting other files
+ IsExternalRefsAllowed bool
+
+	// ReadFromURIFunc allows overriding the file/URL reading func
+ ReadFromURIFunc ReadFromURIFunc
+
+ Context context.Context
+
+ rootDir string
+ rootLocation string
+
+ visitedPathItemRefs map[string]struct{}
+
+ visitedDocuments map[string]*T
+
+ visitedCallback map[*Callback]struct{}
+ visitedExample map[*Example]struct{}
+ visitedHeader map[*Header]struct{}
+ visitedLink map[*Link]struct{}
+ visitedParameter map[*Parameter]struct{}
+ visitedRequestBody map[*RequestBody]struct{}
+ visitedResponse map[*Response]struct{}
+ visitedSchema map[*Schema]struct{}
+ visitedSecurityScheme map[*SecurityScheme]struct{}
+}
+
+// NewLoader returns an empty Loader
+func NewLoader() *Loader {
+ return &Loader{
+ Context: context.Background(),
+ }
+}
+
+func (loader *Loader) resetVisitedPathItemRefs() {
+ loader.visitedPathItemRefs = make(map[string]struct{})
+}
+
+// LoadFromURI loads a spec from a remote URL
+func (loader *Loader) LoadFromURI(location *url.URL) (*T, error) {
+ loader.resetVisitedPathItemRefs()
+ return loader.loadFromURIInternal(location)
+}
+
+// LoadFromFile loads a spec from a local file path
+func (loader *Loader) LoadFromFile(location string) (*T, error) {
+ loader.rootDir = path.Dir(location)
+ return loader.LoadFromURI(&url.URL{Path: filepath.ToSlash(location)})
+}
+
+func (loader *Loader) loadFromURIInternal(location *url.URL) (*T, error) {
+ data, err := loader.readURL(location)
+ if err != nil {
+ return nil, err
+ }
+ return loader.loadFromDataWithPathInternal(data, location)
+}
+
+func (loader *Loader) allowsExternalRefs(ref string) (err error) {
+ if !loader.IsExternalRefsAllowed {
+ err = fmt.Errorf("encountered disallowed external reference: %q", ref)
+ }
+ return
+}
+
+func (loader *Loader) loadSingleElementFromURI(ref string, rootPath *url.URL, element interface{}) (*url.URL, error) {
+ if err := loader.allowsExternalRefs(ref); err != nil {
+ return nil, err
+ }
+
+ resolvedPath, err := resolvePathWithRef(ref, rootPath)
+ if err != nil {
+ return nil, err
+ }
+ if frag := resolvedPath.Fragment; frag != "" {
+ return nil, fmt.Errorf("unexpected ref fragment %q", frag)
+ }
+
+ data, err := loader.readURL(resolvedPath)
+ if err != nil {
+ return nil, err
+ }
+ if err := unmarshal(data, element); err != nil {
+ return nil, err
+ }
+
+ return resolvedPath, nil
+}
+
+func (loader *Loader) readURL(location *url.URL) ([]byte, error) {
+ if f := loader.ReadFromURIFunc; f != nil {
+ return f(loader, location)
+ }
+ return DefaultReadFromURI(loader, location)
+}
+
+// LoadFromStdin loads a spec from stdin
+func (loader *Loader) LoadFromStdin() (*T, error) {
+ return loader.LoadFromIoReader(os.Stdin)
+}
+
+// LoadFromIoReader loads a spec from an io.Reader
+func (loader *Loader) LoadFromIoReader(reader io.Reader) (*T, error) {
+ if reader == nil {
+ return nil, fmt.Errorf("invalid reader: %v", reader)
+ }
+
+ data, err := io.ReadAll(reader)
+ if err != nil {
+ return nil, err
+ }
+ return loader.LoadFromData(data)
+}
+
+// LoadFromData loads a spec from a byte slice
+func (loader *Loader) LoadFromData(data []byte) (*T, error) {
+ loader.resetVisitedPathItemRefs()
+ doc := &T{}
+ if err := unmarshal(data, doc); err != nil {
+ return nil, err
+ }
+ if err := loader.ResolveRefsIn(doc, nil); err != nil {
+ return nil, err
+ }
+ return doc, nil
+}
+
+// LoadFromDataWithPath takes the OpenAPI document data in bytes and a path where the resolver can find referred
+// elements and returns a *T with all resolved data or an error if unable to load data or resolve refs.
+func (loader *Loader) LoadFromDataWithPath(data []byte, location *url.URL) (*T, error) {
+ loader.resetVisitedPathItemRefs()
+ return loader.loadFromDataWithPathInternal(data, location)
+}
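+
+// exampleLoadBytesWithPathSketch is an illustrative sketch, not upstream API:
+// the location URL tells the resolver where relative $ref values inside data
+// are resolved from. The path "specs/openapi.json" is a placeholder value.
+func exampleLoadBytesWithPathSketch(loader *Loader, data []byte) (*T, error) {
+ return loader.LoadFromDataWithPath(data, &url.URL{Path: "specs/openapi.json"})
+}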
+
+func (loader *Loader) loadFromDataWithPathInternal(data []byte, location *url.URL) (*T, error) {
+ if loader.visitedDocuments == nil {
+ loader.visitedDocuments = make(map[string]*T)
+ loader.rootLocation = location.Path
+ }
+ uri := location.String()
+ if doc, ok := loader.visitedDocuments[uri]; ok {
+ return doc, nil
+ }
+
+ doc := &T{}
+ loader.visitedDocuments[uri] = doc
+
+ if err := unmarshal(data, doc); err != nil {
+ return nil, err
+ }
+ if err := loader.ResolveRefsIn(doc, location); err != nil {
+ return nil, err
+ }
+
+ return doc, nil
+}
+
+// ResolveRefsIn expands references if, for instance, the spec was just unmarshaled
+func (loader *Loader) ResolveRefsIn(doc *T, location *url.URL) (err error) {
+ if loader.Context == nil {
+ loader.Context = context.Background()
+ }
+
+ if loader.visitedPathItemRefs == nil {
+ loader.resetVisitedPathItemRefs()
+ }
+
+ if components := doc.Components; components != nil {
+ for _, component := range components.Headers {
+ if err = loader.resolveHeaderRef(doc, component, location); err != nil {
+ return
+ }
+ }
+ for _, component := range components.Parameters {
+ if err = loader.resolveParameterRef(doc, component, location); err != nil {
+ return
+ }
+ }
+ for _, component := range components.RequestBodies {
+ if err = loader.resolveRequestBodyRef(doc, component, location); err != nil {
+ return
+ }
+ }
+ for _, component := range components.Responses {
+ if err = loader.resolveResponseRef(doc, component, location); err != nil {
+ return
+ }
+ }
+ for _, component := range components.Schemas {
+ if err = loader.resolveSchemaRef(doc, component, location, []string{}); err != nil {
+ return
+ }
+ }
+ for _, component := range components.SecuritySchemes {
+ if err = loader.resolveSecuritySchemeRef(doc, component, location); err != nil {
+ return
+ }
+ }
+
+ examples := make([]string, 0, len(components.Examples))
+ for name := range components.Examples {
+ examples = append(examples, name)
+ }
+ sort.Strings(examples)
+ for _, name := range examples {
+ component := components.Examples[name]
+ if err = loader.resolveExampleRef(doc, component, location); err != nil {
+ return
+ }
+ }
+
+ for _, component := range components.Callbacks {
+ if err = loader.resolveCallbackRef(doc, component, location); err != nil {
+ return
+ }
+ }
+ }
+
+ // Visit all operations
+ for _, pathItem := range doc.Paths.Map() {
+ if pathItem == nil {
+ continue
+ }
+ if err = loader.resolvePathItemRef(doc, pathItem, location); err != nil {
+ return
+ }
+ }
+
+ return
+}
+
+func join(basePath *url.URL, relativePath *url.URL) *url.URL {
+ if basePath == nil {
+ return relativePath
+ }
+ newPath := *basePath
+ newPath.Path = path.Join(path.Dir(newPath.Path), relativePath.Path)
+ return &newPath
+}
+
+func resolvePath(basePath *url.URL, componentPath *url.URL) *url.URL {
+ if is_file(componentPath) {
+ // support absolute paths
+ if componentPath.Path[0] == '/' {
+ return componentPath
+ }
+ return join(basePath, componentPath)
+ }
+ return componentPath
+}
+
+func resolvePathWithRef(ref string, rootPath *url.URL) (*url.URL, error) {
+ parsedURL, err := url.Parse(ref)
+ if err != nil {
+ return nil, fmt.Errorf("cannot parse reference: %q: %w", ref, err)
+ }
+
+ resolvedPath := resolvePath(rootPath, parsedURL)
+ resolvedPath.Fragment = parsedURL.Fragment
+ return resolvedPath, nil
+}
+
+func isSingleRefElement(ref string) bool {
+ return !strings.Contains(ref, "#")
+}
+
+func (loader *Loader) resolveComponent(doc *T, ref string, path *url.URL, resolved interface{}) (
+ componentDoc *T,
+ componentPath *url.URL,
+ err error,
+) {
+ if componentDoc, ref, componentPath, err = loader.resolveRef(doc, ref, path); err != nil {
+ return nil, nil, err
+ }
+
+ parsedURL, err := url.Parse(ref)
+ if err != nil {
+ return nil, nil, fmt.Errorf("cannot parse reference: %q: %v", ref, parsedURL)
+ }
+ fragment := parsedURL.Fragment
+ if fragment == "" {
+ fragment = "/"
+ }
+ if fragment[0] != '/' {
+ return nil, nil, fmt.Errorf("expected fragment prefix '#/' in URI %q", ref)
+ }
+
+ drill := func(cursor interface{}) (interface{}, error) {
+ for _, pathPart := range strings.Split(fragment[1:], "/") {
+ pathPart = unescapeRefString(pathPart)
+ attempted := false
+
+ switch c := cursor.(type) {
+ // Special case of T
+ // See issue856: a ref to doc => we assume that doc is a T => things live in T.Extensions
+ case *T:
+ if pathPart == "" {
+ cursor = c.Extensions
+ attempted = true
+ }
+
+ // Special case due to multijson
+ case *SchemaRef:
+ if pathPart == "additionalProperties" {
+ if ap := c.Value.AdditionalProperties.Has; ap != nil {
+ cursor = *ap
+ } else {
+ cursor = c.Value.AdditionalProperties.Schema
+ }
+ attempted = true
+ }
+
+ case *Responses:
+ cursor = c.m // m map[string]*ResponseRef
+ case *Callback:
+ cursor = c.m // m map[string]*PathItem
+ case *Paths:
+ cursor = c.m // m map[string]*PathItem
+ }
+
+ if !attempted {
+ if cursor, err = drillIntoField(cursor, pathPart); err != nil {
+ e := failedToResolveRefFragmentPart(ref, pathPart)
+ return nil, fmt.Errorf("%s: %w", e, err)
+ }
+ }
+
+ if cursor == nil {
+ return nil, failedToResolveRefFragmentPart(ref, pathPart)
+ }
+ }
+ return cursor, nil
+ }
+ var cursor interface{}
+ if cursor, err = drill(componentDoc); err != nil {
+ if path == nil {
+ return nil, nil, err
+ }
+ var err2 error
+ data, err2 := loader.readURL(path)
+ if err2 != nil {
+ return nil, nil, err
+ }
+ if err2 = unmarshal(data, &cursor); err2 != nil {
+ return nil, nil, err
+ }
+ if cursor, err2 = drill(cursor); err2 != nil || cursor == nil {
+ return nil, nil, err
+ }
+ err = nil
+ }
+
+ switch {
+ case reflect.TypeOf(cursor) == reflect.TypeOf(resolved):
+ reflect.ValueOf(resolved).Elem().Set(reflect.ValueOf(cursor).Elem())
+ return componentDoc, componentPath, nil
+
+ case reflect.TypeOf(cursor) == reflect.TypeOf(map[string]interface{}{}):
+ codec := func(got, expect interface{}) error {
+ enc, err := json.Marshal(got)
+ if err != nil {
+ return err
+ }
+ if err = json.Unmarshal(enc, expect); err != nil {
+ return err
+ }
+ return nil
+ }
+ if err := codec(cursor, resolved); err != nil {
+ return nil, nil, fmt.Errorf("bad data in %q (expecting %s)", ref, readableType(resolved))
+ }
+ return componentDoc, componentPath, nil
+
+ default:
+ return nil, nil, fmt.Errorf("bad data in %q (expecting %s)", ref, readableType(resolved))
+ }
+}
+
+func readableType(x interface{}) string {
+ switch x.(type) {
+ case *Callback:
+ return "callback object"
+ case *CallbackRef:
+ return "ref to callback object"
+ case *ExampleRef:
+ return "ref to example object"
+ case *HeaderRef:
+ return "ref to header object"
+ case *LinkRef:
+ return "ref to link object"
+ case *ParameterRef:
+ return "ref to parameter object"
+ case *PathItem:
+ return "pathItem object"
+ case *RequestBodyRef:
+ return "ref to requestBody object"
+ case *ResponseRef:
+ return "ref to response object"
+ case *SchemaRef:
+ return "ref to schema object"
+ case *SecuritySchemeRef:
+ return "ref to securityScheme object"
+ default:
+ panic(fmt.Sprintf("unreachable %T", x))
+ }
+}
+
+func drillIntoField(cursor interface{}, fieldName string) (interface{}, error) {
+ switch val := reflect.Indirect(reflect.ValueOf(cursor)); val.Kind() {
+
+ case reflect.Map:
+ elementValue := val.MapIndex(reflect.ValueOf(fieldName))
+ if !elementValue.IsValid() {
+ return nil, fmt.Errorf("map key %q not found", fieldName)
+ }
+ return elementValue.Interface(), nil
+
+ case reflect.Slice:
+ i, err := strconv.ParseUint(fieldName, 10, 32)
+ if err != nil {
+ return nil, err
+ }
+ index := int(i)
+ if 0 > index || index >= val.Len() {
+ return nil, errors.New("slice index out of bounds")
+ }
+ return val.Index(index).Interface(), nil
+
+ case reflect.Struct:
+ hasFields := false
+ for i := 0; i < val.NumField(); i++ {
+ hasFields = true
+ if yamlTag := val.Type().Field(i).Tag.Get("yaml"); yamlTag != "-" {
+ if tagName := strings.Split(yamlTag, ",")[0]; tagName != "" {
+ if fieldName == tagName {
+ return val.Field(i).Interface(), nil
+ }
+ }
+ }
+ }
+
+ // if cursor is a "ref wrapper" struct (e.g. RequestBodyRef),
+ if _, ok := val.Type().FieldByName("Value"); ok {
+ // try digging into its Value field
+ return drillIntoField(val.FieldByName("Value").Interface(), fieldName)
+ }
+ if hasFields {
+ if ff := val.Type().Field(0); ff.PkgPath == "" && ff.Name == "Extensions" {
+ extensions := val.Field(0).Interface().(map[string]interface{})
+ if enc, ok := extensions[fieldName]; ok {
+ return enc, nil
+ }
+ }
+ }
+ return nil, fmt.Errorf("struct field %q not found", fieldName)
+
+ default:
+ return nil, errors.New("not a map, slice nor struct")
+ }
+}
+
+func (loader *Loader) resolveRef(doc *T, ref string, path *url.URL) (*T, string, *url.URL, error) {
+ if ref != "" && ref[0] == '#' {
+ return doc, ref, path, nil
+ }
+
+ if err := loader.allowsExternalRefs(ref); err != nil {
+ return nil, "", nil, err
+ }
+
+ resolvedPath, err := resolvePathWithRef(ref, path)
+ if err != nil {
+ return nil, "", nil, err
+ }
+ fragment := "#" + resolvedPath.Fragment
+ resolvedPath.Fragment = ""
+
+ if doc, err = loader.loadFromURIInternal(resolvedPath); err != nil {
+ return nil, "", nil, fmt.Errorf("error resolving reference %q: %w", ref, err)
+ }
+
+ return doc, fragment, resolvedPath, nil
+}
+
+var (
+ errMUSTCallback = errors.New("invalid callback: value MUST be an object")
+ errMUSTExample = errors.New("invalid example: value MUST be an object")
+ errMUSTHeader = errors.New("invalid header: value MUST be an object")
+ errMUSTLink = errors.New("invalid link: value MUST be an object")
+ errMUSTParameter = errors.New("invalid parameter: value MUST be an object")
+ errMUSTPathItem = errors.New("invalid path item: value MUST be an object")
+ errMUSTRequestBody = errors.New("invalid requestBody: value MUST be an object")
+ errMUSTResponse = errors.New("invalid response: value MUST be an object")
+ errMUSTSchema = errors.New("invalid schema: value MUST be an object")
+ errMUSTSecurityScheme = errors.New("invalid securityScheme: value MUST be an object")
+)
+
+func (loader *Loader) resolveHeaderRef(doc *T, component *HeaderRef, documentPath *url.URL) (err error) {
+ if component.isEmpty() {
+ return errMUSTHeader
+ }
+
+ if component.Value != nil {
+ if loader.visitedHeader == nil {
+ loader.visitedHeader = make(map[*Header]struct{})
+ }
+ if _, ok := loader.visitedHeader[component.Value]; ok {
+ return nil
+ }
+ loader.visitedHeader[component.Value] = struct{}{}
+ }
+
+ if ref := component.Ref; ref != "" {
+ if isSingleRefElement(ref) {
+ var header Header
+ if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &header); err != nil {
+ return err
+ }
+ component.Value = &header
+ } else {
+ var resolved HeaderRef
+ doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved)
+ if err != nil {
+ return err
+ }
+ if err := loader.resolveHeaderRef(doc, &resolved, componentPath); err != nil {
+ if err == errMUSTHeader {
+ return nil
+ }
+ return err
+ }
+ component.Value = resolved.Value
+ }
+ }
+ value := component.Value
+ if value == nil {
+ return nil
+ }
+
+ if schema := value.Schema; schema != nil {
+ if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (loader *Loader) resolveParameterRef(doc *T, component *ParameterRef, documentPath *url.URL) (err error) {
+ if component.isEmpty() {
+ return errMUSTParameter
+ }
+
+ if component.Value != nil {
+ if loader.visitedParameter == nil {
+ loader.visitedParameter = make(map[*Parameter]struct{})
+ }
+ if _, ok := loader.visitedParameter[component.Value]; ok {
+ return nil
+ }
+ loader.visitedParameter[component.Value] = struct{}{}
+ }
+
+ if ref := component.Ref; ref != "" {
+ if isSingleRefElement(ref) {
+ var param Parameter
+ if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, ¶m); err != nil {
+ return err
+ }
+ component.Value = ¶m
+ } else {
+ var resolved ParameterRef
+ doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved)
+ if err != nil {
+ return err
+ }
+ if err := loader.resolveParameterRef(doc, &resolved, componentPath); err != nil {
+ if err == errMUSTParameter {
+ return nil
+ }
+ return err
+ }
+ component.Value = resolved.Value
+ }
+ }
+ value := component.Value
+ if value == nil {
+ return nil
+ }
+
+ if value.Content != nil && value.Schema != nil {
+ return errors.New("cannot contain both schema and content in a parameter")
+ }
+ for _, contentType := range value.Content {
+ if schema := contentType.Schema; schema != nil {
+ if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil {
+ return err
+ }
+ }
+ }
+ if schema := value.Schema; schema != nil {
+ if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (loader *Loader) resolveRequestBodyRef(doc *T, component *RequestBodyRef, documentPath *url.URL) (err error) {
+ if component.isEmpty() {
+ return errMUSTRequestBody
+ }
+
+ if component.Value != nil {
+ if loader.visitedRequestBody == nil {
+ loader.visitedRequestBody = make(map[*RequestBody]struct{})
+ }
+ if _, ok := loader.visitedRequestBody[component.Value]; ok {
+ return nil
+ }
+ loader.visitedRequestBody[component.Value] = struct{}{}
+ }
+
+ if ref := component.Ref; ref != "" {
+ if isSingleRefElement(ref) {
+ var requestBody RequestBody
+ if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &requestBody); err != nil {
+ return err
+ }
+ component.Value = &requestBody
+ } else {
+ var resolved RequestBodyRef
+ doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved)
+ if err != nil {
+ return err
+ }
+ if err = loader.resolveRequestBodyRef(doc, &resolved, componentPath); err != nil {
+ if err == errMUSTRequestBody {
+ return nil
+ }
+ return err
+ }
+ component.Value = resolved.Value
+ }
+ }
+ value := component.Value
+ if value == nil {
+ return nil
+ }
+
+ for _, contentType := range value.Content {
+ if contentType == nil {
+ continue
+ }
+ examples := make([]string, 0, len(contentType.Examples))
+ for name := range contentType.Examples {
+ examples = append(examples, name)
+ }
+ sort.Strings(examples)
+ for _, name := range examples {
+ example := contentType.Examples[name]
+ if err := loader.resolveExampleRef(doc, example, documentPath); err != nil {
+ return err
+ }
+ contentType.Examples[name] = example
+ }
+ if schema := contentType.Schema; schema != nil {
+ if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+func (loader *Loader) resolveResponseRef(doc *T, component *ResponseRef, documentPath *url.URL) (err error) {
+ if component.isEmpty() {
+ return errMUSTResponse
+ }
+
+ if component.Value != nil {
+ if loader.visitedResponse == nil {
+ loader.visitedResponse = make(map[*Response]struct{})
+ }
+ if _, ok := loader.visitedResponse[component.Value]; ok {
+ return nil
+ }
+ loader.visitedResponse[component.Value] = struct{}{}
+ }
+
+ if ref := component.Ref; ref != "" {
+ if isSingleRefElement(ref) {
+ var resp Response
+ if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &resp); err != nil {
+ return err
+ }
+ component.Value = &resp
+ } else {
+ var resolved ResponseRef
+ doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved)
+ if err != nil {
+ return err
+ }
+ if err := loader.resolveResponseRef(doc, &resolved, componentPath); err != nil {
+ if err == errMUSTResponse {
+ return nil
+ }
+ return err
+ }
+ component.Value = resolved.Value
+ }
+ }
+ value := component.Value
+ if value == nil {
+ return nil
+ }
+
+ for _, header := range value.Headers {
+ if err := loader.resolveHeaderRef(doc, header, documentPath); err != nil {
+ return err
+ }
+ }
+ for _, contentType := range value.Content {
+ if contentType == nil {
+ continue
+ }
+ examples := make([]string, 0, len(contentType.Examples))
+ for name := range contentType.Examples {
+ examples = append(examples, name)
+ }
+ sort.Strings(examples)
+ for _, name := range examples {
+ example := contentType.Examples[name]
+ if err := loader.resolveExampleRef(doc, example, documentPath); err != nil {
+ return err
+ }
+ contentType.Examples[name] = example
+ }
+ if schema := contentType.Schema; schema != nil {
+ if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil {
+ return err
+ }
+ contentType.Schema = schema
+ }
+ }
+ for _, link := range value.Links {
+ if err := loader.resolveLinkRef(doc, link, documentPath); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (loader *Loader) resolveSchemaRef(doc *T, component *SchemaRef, documentPath *url.URL, visited []string) (err error) {
+ if component.isEmpty() {
+ return errMUSTSchema
+ }
+
+ if component.Value != nil {
+ if loader.visitedSchema == nil {
+ loader.visitedSchema = make(map[*Schema]struct{})
+ }
+ if _, ok := loader.visitedSchema[component.Value]; ok {
+ return nil
+ }
+ loader.visitedSchema[component.Value] = struct{}{}
+ }
+
+ if ref := component.Ref; ref != "" {
+ if isSingleRefElement(ref) {
+ var schema Schema
+ if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &schema); err != nil {
+ return err
+ }
+ component.Value = &schema
+ } else {
+ if visitedLimit(visited, ref) {
+ visited = append(visited, ref)
+ return fmt.Errorf("%s with length %d - %s", CircularReferenceError, len(visited), strings.Join(visited, " -> "))
+ }
+ visited = append(visited, ref)
+
+ var resolved SchemaRef
+ doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved)
+ if err != nil {
+ return err
+ }
+ if err := loader.resolveSchemaRef(doc, &resolved, componentPath, visited); err != nil {
+ if err == errMUSTSchema {
+ return nil
+ }
+ return err
+ }
+ component.Value = resolved.Value
+ }
+ if loader.visitedSchema == nil {
+ loader.visitedSchema = make(map[*Schema]struct{})
+ }
+ loader.visitedSchema[component.Value] = struct{}{}
+ }
+ value := component.Value
+ if value == nil {
+ return nil
+ }
+
+ // Resolve refs in referred schemas
+ if v := value.Items; v != nil {
+ if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil {
+ return err
+ }
+ }
+ for _, v := range value.Properties {
+ if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil {
+ return err
+ }
+ }
+ if v := value.AdditionalProperties.Schema; v != nil {
+ if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil {
+ return err
+ }
+ }
+ if v := value.Not; v != nil {
+ if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil {
+ return err
+ }
+ }
+ for _, v := range value.AllOf {
+ if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil {
+ return err
+ }
+ }
+ for _, v := range value.AnyOf {
+ if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil {
+ return err
+ }
+ }
+ for _, v := range value.OneOf {
+ if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (loader *Loader) resolveSecuritySchemeRef(doc *T, component *SecuritySchemeRef, documentPath *url.URL) (err error) {
+ if component.isEmpty() {
+ return errMUSTSecurityScheme
+ }
+
+ if component.Value != nil {
+ if loader.visitedSecurityScheme == nil {
+ loader.visitedSecurityScheme = make(map[*SecurityScheme]struct{})
+ }
+ if _, ok := loader.visitedSecurityScheme[component.Value]; ok {
+ return nil
+ }
+ loader.visitedSecurityScheme[component.Value] = struct{}{}
+ }
+
+ if ref := component.Ref; ref != "" {
+ if isSingleRefElement(ref) {
+ var scheme SecurityScheme
+ if _, err = loader.loadSingleElementFromURI(ref, documentPath, &scheme); err != nil {
+ return err
+ }
+ component.Value = &scheme
+ } else {
+ var resolved SecuritySchemeRef
+ doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved)
+ if err != nil {
+ return err
+ }
+ if err := loader.resolveSecuritySchemeRef(doc, &resolved, componentPath); err != nil {
+ if err == errMUSTSecurityScheme {
+ return nil
+ }
+ return err
+ }
+ component.Value = resolved.Value
+ }
+ }
+ return nil
+}
+
+func (loader *Loader) resolveExampleRef(doc *T, component *ExampleRef, documentPath *url.URL) (err error) {
+ if component.isEmpty() {
+ return errMUSTExample
+ }
+
+ if component.Value != nil {
+ if loader.visitedExample == nil {
+ loader.visitedExample = make(map[*Example]struct{})
+ }
+ if _, ok := loader.visitedExample[component.Value]; ok {
+ return nil
+ }
+ loader.visitedExample[component.Value] = struct{}{}
+ }
+
+ if ref := component.Ref; ref != "" {
+ if isSingleRefElement(ref) {
+ var example Example
+ if _, err = loader.loadSingleElementFromURI(ref, documentPath, &example); err != nil {
+ return err
+ }
+ component.Value = &example
+ } else {
+ var resolved ExampleRef
+ doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved)
+ if err != nil {
+ return err
+ }
+ if err := loader.resolveExampleRef(doc, &resolved, componentPath); err != nil {
+ if err == errMUSTExample {
+ return nil
+ }
+ return err
+ }
+ component.Value = resolved.Value
+ }
+ }
+ return nil
+}
+
+func (loader *Loader) resolveCallbackRef(doc *T, component *CallbackRef, documentPath *url.URL) (err error) {
+ if component.isEmpty() {
+ return errMUSTCallback
+ }
+
+ if component.Value != nil {
+ if loader.visitedCallback == nil {
+ loader.visitedCallback = make(map[*Callback]struct{})
+ }
+ if _, ok := loader.visitedCallback[component.Value]; ok {
+ return nil
+ }
+ loader.visitedCallback[component.Value] = struct{}{}
+ }
+
+ if ref := component.Ref; ref != "" {
+ if isSingleRefElement(ref) {
+ var resolved Callback
+ if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &resolved); err != nil {
+ return err
+ }
+ component.Value = &resolved
+ } else {
+ var resolved CallbackRef
+ doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved)
+ if err != nil {
+ return err
+ }
+ if err = loader.resolveCallbackRef(doc, &resolved, componentPath); err != nil {
+ if err == errMUSTCallback {
+ return nil
+ }
+ return err
+ }
+ component.Value = resolved.Value
+ }
+ }
+ value := component.Value
+ if value == nil {
+ return nil
+ }
+
+ for _, pathItem := range value.Map() {
+ if err = loader.resolvePathItemRef(doc, pathItem, documentPath); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (loader *Loader) resolveLinkRef(doc *T, component *LinkRef, documentPath *url.URL) (err error) {
+ if component.isEmpty() {
+ return errMUSTLink
+ }
+
+ if component.Value != nil {
+ if loader.visitedLink == nil {
+ loader.visitedLink = make(map[*Link]struct{})
+ }
+ if _, ok := loader.visitedLink[component.Value]; ok {
+ return nil
+ }
+ loader.visitedLink[component.Value] = struct{}{}
+ }
+
+ if ref := component.Ref; ref != "" {
+ if isSingleRefElement(ref) {
+ var link Link
+ if _, err = loader.loadSingleElementFromURI(ref, documentPath, &link); err != nil {
+ return err
+ }
+ component.Value = &link
+ } else {
+ var resolved LinkRef
+ doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved)
+ if err != nil {
+ return err
+ }
+ if err := loader.resolveLinkRef(doc, &resolved, componentPath); err != nil {
+ if err == errMUSTLink {
+ return nil
+ }
+ return err
+ }
+ component.Value = resolved.Value
+ }
+ }
+ return nil
+}
+
+func (loader *Loader) resolvePathItemRef(doc *T, pathItem *PathItem, documentPath *url.URL) (err error) {
+ if pathItem == nil {
+ err = errMUSTPathItem
+ return
+ }
+
+ if ref := pathItem.Ref; ref != "" {
+ if !pathItem.isEmpty() {
+ return
+ }
+ if isSingleRefElement(ref) {
+ var p PathItem
+ if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &p); err != nil {
+ return
+ }
+ *pathItem = p
+ } else {
+ var resolved PathItem
+ if doc, documentPath, err = loader.resolveComponent(doc, ref, documentPath, &resolved); err != nil {
+ if err == errMUSTPathItem {
+ return nil
+ }
+ return
+ }
+ *pathItem = resolved
+ }
+ pathItem.Ref = ref
+ }
+
+ for _, parameter := range pathItem.Parameters {
+ if err = loader.resolveParameterRef(doc, parameter, documentPath); err != nil {
+ return
+ }
+ }
+ for _, operation := range pathItem.Operations() {
+ for _, parameter := range operation.Parameters {
+ if err = loader.resolveParameterRef(doc, parameter, documentPath); err != nil {
+ return
+ }
+ }
+ if requestBody := operation.RequestBody; requestBody != nil {
+ if err = loader.resolveRequestBodyRef(doc, requestBody, documentPath); err != nil {
+ return
+ }
+ }
+ for _, response := range operation.Responses.Map() {
+ if err = loader.resolveResponseRef(doc, response, documentPath); err != nil {
+ return
+ }
+ }
+ for _, callback := range operation.Callbacks {
+ if err = loader.resolveCallbackRef(doc, callback, documentPath); err != nil {
+ return
+ }
+ }
+ }
+ return
+}
+
+func unescapeRefString(ref string) string {
+ return strings.Replace(strings.Replace(ref, "~1", "/", -1), "~0", "~", -1)
+}
+
+func visitedLimit(visited []string, ref string) bool {
+ visitedCount := 0
+ for _, v := range visited {
+ if v == ref {
+ visitedCount++
+ if visitedCount >= CircularReferenceCounter {
+ return true
+ }
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/loader_uri_reader.go b/vendor/github.com/getkin/kin-openapi/openapi3/loader_uri_reader.go
new file mode 100644
index 00000000..ba7b5f24
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/loader_uri_reader.go
@@ -0,0 +1,116 @@
+package openapi3
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "os"
+ "path/filepath"
+ "sync"
+)
+
+// ReadFromURIFunc defines a function which reads the contents of a resource
+// located at a URI.
+type ReadFromURIFunc func(loader *Loader, url *url.URL) ([]byte, error)
+
+var uriMu = &sync.RWMutex{}
+
+// ErrURINotSupported indicates the ReadFromURIFunc does not know how to handle a
+// given URI.
+var ErrURINotSupported = errors.New("unsupported URI")
+
+// ReadFromURIs returns a ReadFromURIFunc which tries to read a URI using the
+// given reader functions, in the order given. If a reader function does not
+// support the URI and returns ErrURINotSupported, the next function is tried
+// until a match is found, or the URI is not supported by any.
+func ReadFromURIs(readers ...ReadFromURIFunc) ReadFromURIFunc {
+ return func(loader *Loader, url *url.URL) ([]byte, error) {
+ for i := range readers {
+ buf, err := readers[i](loader, url)
+ if err == ErrURINotSupported {
+ continue
+ } else if err != nil {
+ return nil, err
+ }
+ return buf, nil
+ }
+ return nil, ErrURINotSupported
+ }
+}
+
+// DefaultReadFromURI is a caching ReadFromURIFunc which can read remote
+// HTTP URIs and local file URIs.
+var DefaultReadFromURI = URIMapCache(ReadFromURIs(ReadFromHTTP(http.DefaultClient), ReadFromFile))
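+
+// exampleCustomReaderSketch is an illustrative sketch, not upstream API: it
+// composes a reader the same way DefaultReadFromURI is built, but with a
+// caller-supplied http.Client (e.g. one with timeouts or RFC 7234 caching).
+// A Loader would then use it via loader.ReadFromURIFunc = exampleCustomReaderSketch(client).
+func exampleCustomReaderSketch(client *http.Client) ReadFromURIFunc {
+ return URIMapCache(ReadFromURIs(ReadFromHTTP(client), ReadFromFile))
+}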
+
+// ReadFromHTTP returns a ReadFromURIFunc which uses the given http.Client to
+// read the contents from a remote HTTP URI. This client may be customized to
+// implement timeouts, RFC 7234 caching, etc.
+func ReadFromHTTP(cl *http.Client) ReadFromURIFunc {
+ return func(loader *Loader, location *url.URL) ([]byte, error) {
+ if location.Scheme == "" || location.Host == "" {
+ return nil, ErrURINotSupported
+ }
+ req, err := http.NewRequest("GET", location.String(), nil)
+ if err != nil {
+ return nil, err
+ }
+ resp, err := cl.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ defer resp.Body.Close()
+ if resp.StatusCode > 399 {
+ return nil, fmt.Errorf("error loading %q: request returned status code %d", location.String(), resp.StatusCode)
+ }
+ return io.ReadAll(resp.Body)
+ }
+}
+
+func is_file(location *url.URL) bool {
+ return location.Path != "" &&
+ location.Host == "" &&
+ (location.Scheme == "" || location.Scheme == "file")
+}
+
+// ReadFromFile is a ReadFromURIFunc which reads local file URIs.
+func ReadFromFile(loader *Loader, location *url.URL) ([]byte, error) {
+ if !is_file(location) {
+ return nil, ErrURINotSupported
+ }
+ return os.ReadFile(location.Path)
+}
+
+// URIMapCache returns a ReadFromURIFunc that caches the contents read from URI
+// locations in a simple map. This cache implementation is suitable for
+// short-lived processes such as command-line tools which process OpenAPI
+// documents.
+func URIMapCache(reader ReadFromURIFunc) ReadFromURIFunc {
+ cache := map[string][]byte{}
+ return func(loader *Loader, location *url.URL) (buf []byte, err error) {
+ if location.Scheme == "" || location.Scheme == "file" {
+ if !filepath.IsAbs(location.Path) {
+ // Do not cache relative file paths; this can cause trouble if
+ // the current working directory changes when processing
+ // multiple top-level documents.
+ return reader(loader, location)
+ }
+ }
+ uri := location.String()
+ var ok bool
+ uriMu.RLock()
+ if buf, ok = cache[uri]; ok {
+ uriMu.RUnlock()
+ return
+ }
+ uriMu.RUnlock()
+ if buf, err = reader(loader, location); err != nil {
+ return
+ }
+ uriMu.Lock()
+ defer uriMu.Unlock()
+ cache[uri] = buf
+ return
+ }
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/maplike.go b/vendor/github.com/getkin/kin-openapi/openapi3/maplike.go
new file mode 100644
index 00000000..b27cbf6c
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/maplike.go
@@ -0,0 +1,366 @@
+package openapi3
+
+import (
+ "encoding/json"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+)
+
+// NewResponsesWithCapacity builds a responses object of the given capacity.
+func NewResponsesWithCapacity(cap int) *Responses {
+ if cap == 0 {
+ return &Responses{m: make(map[string]*ResponseRef)}
+ }
+ return &Responses{m: make(map[string]*ResponseRef, cap)}
+}
+
+// Value returns the ResponseRef for key, or nil.
+func (responses *Responses) Value(key string) *ResponseRef {
+ if responses.Len() == 0 {
+ return nil
+ }
+ return responses.m[key]
+}
+
+// Set adds or replaces key 'key' of 'responses' with 'value'.
+// Note: 'responses' MUST be non-nil
+func (responses *Responses) Set(key string, value *ResponseRef) {
+ if responses.m == nil {
+ responses.m = make(map[string]*ResponseRef)
+ }
+ responses.m[key] = value
+}
+
+// Len returns the number of keys in responses excluding responses.Extensions.
+func (responses *Responses) Len() int {
+ if responses == nil || responses.m == nil {
+ return 0
+ }
+ return len(responses.m)
+}
+
+// Delete removes the entry associated with key 'key' from 'responses'.
+func (responses *Responses) Delete(key string) {
+ if responses != nil && responses.m != nil {
+ delete(responses.m, key)
+ }
+}
+
+// Map returns responses as a 'map'.
+// Note: iteration on Go maps is not ordered.
+func (responses *Responses) Map() (m map[string]*ResponseRef) {
+ if responses == nil || len(responses.m) == 0 {
+ return make(map[string]*ResponseRef)
+ }
+ m = make(map[string]*ResponseRef, len(responses.m))
+ for k, v := range responses.m {
+ m[k] = v
+ }
+ return
+}
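+
+// exampleSortedKeysSketch is an illustrative sketch, not upstream API: since
+// Map returns a plain Go map with unordered iteration, callers that need a
+// stable order (such as code generators) can sort the keys first.
+func exampleSortedKeysSketch(responses *Responses) []string {
+ keys := make([]string, 0, responses.Len())
+ for k := range responses.Map() {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+ return keys
+}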
+
+var _ jsonpointer.JSONPointable = (*Responses)(nil)
+
+// JSONLookup implements https://github.com/go-openapi/jsonpointer#JSONPointable
+func (responses Responses) JSONLookup(token string) (interface{}, error) {
+ if v := responses.Value(token); v == nil {
+ vv, _, err := jsonpointer.GetForToken(responses.Extensions, token)
+ return vv, err
+ } else if ref := v.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ } else {
+ var vv *Response = v.Value
+ return vv, nil
+ }
+}
+
+// MarshalJSON returns the JSON encoding of Responses.
+func (responses *Responses) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, responses.Len()+len(responses.Extensions))
+ for k, v := range responses.Extensions {
+ m[k] = v
+ }
+ for k, v := range responses.Map() {
+ m[k] = v
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Responses to a copy of data.
+func (responses *Responses) UnmarshalJSON(data []byte) (err error) {
+ var m map[string]interface{}
+ if err = json.Unmarshal(data, &m); err != nil {
+ return
+ }
+
+ ks := make([]string, 0, len(m))
+ for k := range m {
+ ks = append(ks, k)
+ }
+ sort.Strings(ks)
+
+ x := Responses{
+ Extensions: make(map[string]interface{}),
+ m: make(map[string]*ResponseRef, len(m)),
+ }
+
+ for _, k := range ks {
+ v := m[k]
+ if strings.HasPrefix(k, "x-") {
+ x.Extensions[k] = v
+ continue
+ }
+
+ var data []byte
+ if data, err = json.Marshal(v); err != nil {
+ return
+ }
+ var vv ResponseRef
+ if err = vv.UnmarshalJSON(data); err != nil {
+ return
+ }
+ x.m[k] = &vv
+ }
+ *responses = x
+ return
+}
+
+// NewCallbackWithCapacity builds a callback object of the given capacity.
+func NewCallbackWithCapacity(cap int) *Callback {
+ if cap == 0 {
+ return &Callback{m: make(map[string]*PathItem)}
+ }
+ return &Callback{m: make(map[string]*PathItem, cap)}
+}
+
+// Value returns the PathItem for key, or nil.
+func (callback *Callback) Value(key string) *PathItem {
+ if callback.Len() == 0 {
+ return nil
+ }
+ return callback.m[key]
+}
+
+// Set adds or replaces key 'key' of 'callback' with 'value'.
+// Note: 'callback' MUST be non-nil
+func (callback *Callback) Set(key string, value *PathItem) {
+ if callback.m == nil {
+ callback.m = make(map[string]*PathItem)
+ }
+ callback.m[key] = value
+}
+
+// Len returns the number of keys in callback excluding callback.Extensions.
+func (callback *Callback) Len() int {
+ if callback == nil || callback.m == nil {
+ return 0
+ }
+ return len(callback.m)
+}
+
+// Delete removes the entry associated with key 'key' from 'callback'.
+func (callback *Callback) Delete(key string) {
+ if callback != nil && callback.m != nil {
+ delete(callback.m, key)
+ }
+}
+
+// Map returns callback as a 'map'.
+// Note: iteration on Go maps is not ordered.
+func (callback *Callback) Map() (m map[string]*PathItem) {
+ if callback == nil || len(callback.m) == 0 {
+ return make(map[string]*PathItem)
+ }
+ m = make(map[string]*PathItem, len(callback.m))
+ for k, v := range callback.m {
+ m[k] = v
+ }
+ return
+}
+
+var _ jsonpointer.JSONPointable = (*Callback)(nil)
+
+// JSONLookup implements https://github.com/go-openapi/jsonpointer#JSONPointable
+func (callback Callback) JSONLookup(token string) (interface{}, error) {
+ if v := callback.Value(token); v == nil {
+ vv, _, err := jsonpointer.GetForToken(callback.Extensions, token)
+ return vv, err
+ } else if ref := v.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ } else {
+ var vv *PathItem = v
+ return vv, nil
+ }
+}
+
+// MarshalJSON returns the JSON encoding of Callback.
+func (callback *Callback) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, callback.Len()+len(callback.Extensions))
+ for k, v := range callback.Extensions {
+ m[k] = v
+ }
+ for k, v := range callback.Map() {
+ m[k] = v
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Callback to a copy of data.
+func (callback *Callback) UnmarshalJSON(data []byte) (err error) {
+ var m map[string]interface{}
+ if err = json.Unmarshal(data, &m); err != nil {
+ return
+ }
+
+ ks := make([]string, 0, len(m))
+ for k := range m {
+ ks = append(ks, k)
+ }
+ sort.Strings(ks)
+
+ x := Callback{
+ Extensions: make(map[string]interface{}),
+ m: make(map[string]*PathItem, len(m)),
+ }
+
+ for _, k := range ks {
+ v := m[k]
+ if strings.HasPrefix(k, "x-") {
+ x.Extensions[k] = v
+ continue
+ }
+
+ var data []byte
+ if data, err = json.Marshal(v); err != nil {
+ return
+ }
+ var vv PathItem
+ if err = vv.UnmarshalJSON(data); err != nil {
+ return
+ }
+ x.m[k] = &vv
+ }
+ *callback = x
+ return
+}
+
+// NewPathsWithCapacity builds a paths object of the given capacity.
+func NewPathsWithCapacity(cap int) *Paths {
+ if cap == 0 {
+ return &Paths{m: make(map[string]*PathItem)}
+ }
+ return &Paths{m: make(map[string]*PathItem, cap)}
+}
+
+// Value returns the PathItem for key, or nil.
+func (paths *Paths) Value(key string) *PathItem {
+ if paths.Len() == 0 {
+ return nil
+ }
+ return paths.m[key]
+}
+
+// Set adds or replaces key 'key' of 'paths' with 'value'.
+// Note: 'paths' MUST be non-nil
+func (paths *Paths) Set(key string, value *PathItem) {
+ if paths.m == nil {
+ paths.m = make(map[string]*PathItem)
+ }
+ paths.m[key] = value
+}
+
+// Len returns the number of keys in paths excluding paths.Extensions.
+func (paths *Paths) Len() int {
+ if paths == nil || paths.m == nil {
+ return 0
+ }
+ return len(paths.m)
+}
+
+// Delete removes the entry associated with key 'key' from 'paths'.
+func (paths *Paths) Delete(key string) {
+ if paths != nil && paths.m != nil {
+ delete(paths.m, key)
+ }
+}
+
+// Map returns paths as a 'map'.
+// Note: iteration on Go maps is not ordered.
+func (paths *Paths) Map() (m map[string]*PathItem) {
+ if paths == nil || len(paths.m) == 0 {
+ return make(map[string]*PathItem)
+ }
+ m = make(map[string]*PathItem, len(paths.m))
+ for k, v := range paths.m {
+ m[k] = v
+ }
+ return
+}
+
+var _ jsonpointer.JSONPointable = (*Paths)(nil)
+
+// JSONLookup implements https://github.com/go-openapi/jsonpointer#JSONPointable
+func (paths Paths) JSONLookup(token string) (interface{}, error) {
+ if v := paths.Value(token); v == nil {
+ vv, _, err := jsonpointer.GetForToken(paths.Extensions, token)
+ return vv, err
+ } else if ref := v.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ } else {
+ var vv *PathItem = v
+ return vv, nil
+ }
+}
+
+// MarshalJSON returns the JSON encoding of Paths.
+func (paths *Paths) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, paths.Len()+len(paths.Extensions))
+ for k, v := range paths.Extensions {
+ m[k] = v
+ }
+ for k, v := range paths.Map() {
+ m[k] = v
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Paths to a copy of data.
+func (paths *Paths) UnmarshalJSON(data []byte) (err error) {
+ var m map[string]interface{}
+ if err = json.Unmarshal(data, &m); err != nil {
+ return
+ }
+
+ ks := make([]string, 0, len(m))
+ for k := range m {
+ ks = append(ks, k)
+ }
+ sort.Strings(ks)
+
+ x := Paths{
+ Extensions: make(map[string]interface{}),
+ m: make(map[string]*PathItem, len(m)),
+ }
+
+ for _, k := range ks {
+ v := m[k]
+ if strings.HasPrefix(k, "x-") {
+ x.Extensions[k] = v
+ continue
+ }
+
+ var data []byte
+ if data, err = json.Marshal(v); err != nil {
+ return
+ }
+ var vv PathItem
+ if err = vv.UnmarshalJSON(data); err != nil {
+ return
+ }
+ x.m[k] = &vv
+ }
+ *paths = x
+ return
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/marsh.go b/vendor/github.com/getkin/kin-openapi/openapi3/marsh.go
new file mode 100644
index 00000000..18036ae7
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/marsh.go
@@ -0,0 +1,26 @@
+package openapi3
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+
+ "github.com/invopop/yaml"
+)
+
+func unmarshalError(jsonUnmarshalErr error) error {
+ if before, after, found := strings.Cut(jsonUnmarshalErr.Error(), "Bis"); found && before != "" && after != "" {
+ before = strings.ReplaceAll(before, " Go struct ", " ")
+ return fmt.Errorf("%s%s", before, strings.ReplaceAll(after, "Bis", ""))
+ }
+ return jsonUnmarshalErr
+}
+
+func unmarshal(data []byte, v interface{}) error {
+ // See https://github.com/getkin/kin-openapi/issues/680
+ if err := json.Unmarshal(data, v); err != nil {
+ // UnmarshalStrict(data, v) TODO: investigate how ymlv3 handles duplicate map keys
+ return yaml.Unmarshal(data, v)
+ }
+ return nil
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/media_type.go b/vendor/github.com/getkin/kin-openapi/openapi3/media_type.go
new file mode 100644
index 00000000..e043a7c9
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/media_type.go
@@ -0,0 +1,170 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "sort"
+
+ "github.com/go-openapi/jsonpointer"
+)
+
+// MediaType is specified by OpenAPI/Swagger 3.0 standard.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#media-type-object
+type MediaType struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Schema *SchemaRef `json:"schema,omitempty" yaml:"schema,omitempty"`
+ Example interface{} `json:"example,omitempty" yaml:"example,omitempty"`
+ Examples Examples `json:"examples,omitempty" yaml:"examples,omitempty"`
+ Encoding map[string]*Encoding `json:"encoding,omitempty" yaml:"encoding,omitempty"`
+}
+
+var _ jsonpointer.JSONPointable = (*MediaType)(nil)
+
+func NewMediaType() *MediaType {
+ return &MediaType{}
+}
+
+func (mediaType *MediaType) WithSchema(schema *Schema) *MediaType {
+ if schema == nil {
+ mediaType.Schema = nil
+ } else {
+ mediaType.Schema = &SchemaRef{Value: schema}
+ }
+ return mediaType
+}
+
+func (mediaType *MediaType) WithSchemaRef(schema *SchemaRef) *MediaType {
+ mediaType.Schema = schema
+ return mediaType
+}
+
+func (mediaType *MediaType) WithExample(name string, value interface{}) *MediaType {
+ example := mediaType.Examples
+ if example == nil {
+ example = make(map[string]*ExampleRef)
+ mediaType.Examples = example
+ }
+ example[name] = &ExampleRef{
+ Value: NewExample(value),
+ }
+ return mediaType
+}
+
+func (mediaType *MediaType) WithEncoding(name string, enc *Encoding) *MediaType {
+ encoding := mediaType.Encoding
+ if encoding == nil {
+ encoding = make(map[string]*Encoding)
+ mediaType.Encoding = encoding
+ }
+ encoding[name] = enc
+ return mediaType
+}
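+
+// exampleJSONMediaTypeSketch is an illustrative sketch, not upstream API: the
+// With* builders above can be chained to assemble a media type. NewObjectSchema
+// (from this package's schema helpers) and the example name "empty" are assumed
+// placeholder choices.
+func exampleJSONMediaTypeSketch() *MediaType {
+ return NewMediaType().
+ WithSchema(NewObjectSchema()).
+ WithExample("empty", map[string]interface{}{})
+}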
+
+// MarshalJSON returns the JSON encoding of MediaType.
+func (mediaType MediaType) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 4+len(mediaType.Extensions))
+ for k, v := range mediaType.Extensions {
+ m[k] = v
+ }
+ if x := mediaType.Schema; x != nil {
+ m["schema"] = x
+ }
+ if x := mediaType.Example; x != nil {
+ m["example"] = x
+ }
+ if x := mediaType.Examples; len(x) != 0 {
+ m["examples"] = x
+ }
+ if x := mediaType.Encoding; len(x) != 0 {
+ m["encoding"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets MediaType to a copy of data.
+func (mediaType *MediaType) UnmarshalJSON(data []byte) error {
+ type MediaTypeBis MediaType
+ var x MediaTypeBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "schema")
+ delete(x.Extensions, "example")
+ delete(x.Extensions, "examples")
+ delete(x.Extensions, "encoding")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *mediaType = MediaType(x)
+ return nil
+}
+
+// Validate returns an error if MediaType does not comply with the OpenAPI spec.
+func (mediaType *MediaType) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if mediaType == nil {
+ return nil
+ }
+ if schema := mediaType.Schema; schema != nil {
+ if err := schema.Validate(ctx); err != nil {
+ return err
+ }
+
+ if mediaType.Example != nil && mediaType.Examples != nil {
+ return errors.New("example and examples are mutually exclusive")
+ }
+
+ if vo := getValidationOptions(ctx); !vo.examplesValidationDisabled {
+ if example := mediaType.Example; example != nil {
+ if err := validateExampleValue(ctx, example, schema.Value); err != nil {
+ return fmt.Errorf("invalid example: %w", err)
+ }
+ }
+
+ if examples := mediaType.Examples; examples != nil {
+ names := make([]string, 0, len(examples))
+ for name := range examples {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ for _, k := range names {
+ v := examples[k]
+ if err := v.Validate(ctx); err != nil {
+ return fmt.Errorf("example %s: %w", k, err)
+ }
+ if err := validateExampleValue(ctx, v.Value.Value, schema.Value); err != nil {
+ return fmt.Errorf("example %s: %w", k, err)
+ }
+ }
+ }
+ }
+ }
+
+ return validateExtensions(ctx, mediaType.Extensions)
+}
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (mediaType MediaType) JSONLookup(token string) (interface{}, error) {
+ switch token {
+ case "schema":
+ if mediaType.Schema != nil {
+ if mediaType.Schema.Ref != "" {
+ return &Ref{Ref: mediaType.Schema.Ref}, nil
+ }
+ return mediaType.Schema.Value, nil
+ }
+ case "example":
+ return mediaType.Example, nil
+ case "examples":
+ return mediaType.Examples, nil
+ case "encoding":
+ return mediaType.Encoding, nil
+ }
+ v, _, err := jsonpointer.GetForToken(mediaType.Extensions, token)
+ return v, err
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/openapi3.go b/vendor/github.com/getkin/kin-openapi/openapi3/openapi3.go
new file mode 100644
index 00000000..04df3505
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/openapi3.go
@@ -0,0 +1,191 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+
+ "github.com/go-openapi/jsonpointer"
+)
+
+// T is the root of an OpenAPI v3 document
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#openapi-object
+type T struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ OpenAPI string `json:"openapi" yaml:"openapi"` // Required
+ Components *Components `json:"components,omitempty" yaml:"components,omitempty"`
+ Info *Info `json:"info" yaml:"info"` // Required
+ Paths *Paths `json:"paths" yaml:"paths"` // Required
+ Security SecurityRequirements `json:"security,omitempty" yaml:"security,omitempty"`
+ Servers Servers `json:"servers,omitempty" yaml:"servers,omitempty"`
+ Tags Tags `json:"tags,omitempty" yaml:"tags,omitempty"`
+ ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"`
+
+ visited visitedComponent
+}
+
+var _ jsonpointer.JSONPointable = (*T)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (doc *T) JSONLookup(token string) (interface{}, error) {
+ switch token {
+ case "openapi":
+ return doc.OpenAPI, nil
+ case "components":
+ return doc.Components, nil
+ case "info":
+ return doc.Info, nil
+ case "paths":
+ return doc.Paths, nil
+ case "security":
+ return doc.Security, nil
+ case "servers":
+ return doc.Servers, nil
+ case "tags":
+ return doc.Tags, nil
+ case "externalDocs":
+ return doc.ExternalDocs, nil
+ }
+
+ v, _, err := jsonpointer.GetForToken(doc.Extensions, token)
+ return v, err
+}
+
+// MarshalJSON returns the JSON encoding of T.
+func (doc *T) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 4+len(doc.Extensions))
+ for k, v := range doc.Extensions {
+ m[k] = v
+ }
+ m["openapi"] = doc.OpenAPI
+ if x := doc.Components; x != nil {
+ m["components"] = x
+ }
+ m["info"] = doc.Info
+ m["paths"] = doc.Paths
+ if x := doc.Security; len(x) != 0 {
+ m["security"] = x
+ }
+ if x := doc.Servers; len(x) != 0 {
+ m["servers"] = x
+ }
+ if x := doc.Tags; len(x) != 0 {
+ m["tags"] = x
+ }
+ if x := doc.ExternalDocs; x != nil {
+ m["externalDocs"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets T to a copy of data.
+func (doc *T) UnmarshalJSON(data []byte) error {
+ type TBis T
+ var x TBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "openapi")
+ delete(x.Extensions, "components")
+ delete(x.Extensions, "info")
+ delete(x.Extensions, "paths")
+ delete(x.Extensions, "security")
+ delete(x.Extensions, "servers")
+ delete(x.Extensions, "tags")
+ delete(x.Extensions, "externalDocs")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *doc = T(x)
+ return nil
+}
+
+func (doc *T) AddOperation(path string, method string, operation *Operation) {
+ if doc.Paths == nil {
+ doc.Paths = NewPaths()
+ }
+ pathItem := doc.Paths.Value(path)
+ if pathItem == nil {
+ pathItem = &PathItem{}
+ doc.Paths.Set(path, pathItem)
+ }
+ pathItem.SetOperation(method, operation)
+}
+
+func (doc *T) AddServer(server *Server) {
+ doc.Servers = append(doc.Servers, server)
+}
+
+func (doc *T) AddServers(servers ...*Server) {
+ doc.Servers = append(doc.Servers, servers...)
+}
+
+// Validate returns an error if T does not comply with the OpenAPI spec.
+// Validation options can be provided to modify the validation behavior.
+func (doc *T) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if doc.OpenAPI == "" {
+ return errors.New("value of openapi must be a non-empty string")
+ }
+
+ var wrap func(error) error
+
+ wrap = func(e error) error { return fmt.Errorf("invalid components: %w", e) }
+ if v := doc.Components; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return wrap(err)
+ }
+ }
+
+ wrap = func(e error) error { return fmt.Errorf("invalid info: %w", e) }
+ if v := doc.Info; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return wrap(err)
+ }
+ } else {
+ return wrap(errors.New("must be an object"))
+ }
+
+ wrap = func(e error) error { return fmt.Errorf("invalid paths: %w", e) }
+ if v := doc.Paths; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return wrap(err)
+ }
+ } else {
+ return wrap(errors.New("must be an object"))
+ }
+
+ wrap = func(e error) error { return fmt.Errorf("invalid security: %w", e) }
+ if v := doc.Security; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return wrap(err)
+ }
+ }
+
+ wrap = func(e error) error { return fmt.Errorf("invalid servers: %w", e) }
+ if v := doc.Servers; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return wrap(err)
+ }
+ }
+
+ wrap = func(e error) error { return fmt.Errorf("invalid tags: %w", e) }
+ if v := doc.Tags; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return wrap(err)
+ }
+ }
+
+ wrap = func(e error) error { return fmt.Errorf("invalid external docs: %w", e) }
+ if v := doc.ExternalDocs; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return wrap(err)
+ }
+ }
+
+ return validateExtensions(ctx, doc.Extensions)
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/operation.go b/vendor/github.com/getkin/kin-openapi/openapi3/operation.go
new file mode 100644
index 00000000..d859a437
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/operation.go
@@ -0,0 +1,213 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "strconv"
+
+ "github.com/go-openapi/jsonpointer"
+)
+
+// Operation represents "operation" specified by OpenAPI/Swagger 3.0 standard.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#operation-object
+type Operation struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ // Optional tags for documentation.
+ Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"`
+
+ // Optional short summary.
+ Summary string `json:"summary,omitempty" yaml:"summary,omitempty"`
+
+ // Optional description. Should use CommonMark syntax.
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+
+ // Optional operation ID.
+ OperationID string `json:"operationId,omitempty" yaml:"operationId,omitempty"`
+
+ // Optional parameters.
+ Parameters Parameters `json:"parameters,omitempty" yaml:"parameters,omitempty"`
+
+ // Optional body parameter.
+ RequestBody *RequestBodyRef `json:"requestBody,omitempty" yaml:"requestBody,omitempty"`
+
+ // Responses.
+ Responses *Responses `json:"responses" yaml:"responses"` // Required
+
+ // Optional callbacks
+ Callbacks Callbacks `json:"callbacks,omitempty" yaml:"callbacks,omitempty"`
+
+ Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"`
+
+ // Optional security requirements that override top-level security.
+ Security *SecurityRequirements `json:"security,omitempty" yaml:"security,omitempty"`
+
+ // Optional servers that override top-level servers.
+ Servers *Servers `json:"servers,omitempty" yaml:"servers,omitempty"`
+
+ ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"`
+}
+
+var _ jsonpointer.JSONPointable = (*Operation)(nil)
+
+func NewOperation() *Operation {
+ return &Operation{}
+}
+
+// MarshalJSON returns the JSON encoding of Operation.
+func (operation Operation) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 12+len(operation.Extensions))
+ for k, v := range operation.Extensions {
+ m[k] = v
+ }
+ if x := operation.Tags; len(x) != 0 {
+ m["tags"] = x
+ }
+ if x := operation.Summary; x != "" {
+ m["summary"] = x
+ }
+ if x := operation.Description; x != "" {
+ m["description"] = x
+ }
+ if x := operation.OperationID; x != "" {
+ m["operationId"] = x
+ }
+ if x := operation.Parameters; len(x) != 0 {
+ m["parameters"] = x
+ }
+ if x := operation.RequestBody; x != nil {
+ m["requestBody"] = x
+ }
+ m["responses"] = operation.Responses
+ if x := operation.Callbacks; len(x) != 0 {
+ m["callbacks"] = x
+ }
+ if x := operation.Deprecated; x {
+ m["deprecated"] = x
+ }
+ if x := operation.Security; x != nil {
+ m["security"] = x
+ }
+ if x := operation.Servers; x != nil {
+ m["servers"] = x
+ }
+ if x := operation.ExternalDocs; x != nil {
+ m["externalDocs"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Operation to a copy of data.
+func (operation *Operation) UnmarshalJSON(data []byte) error {
+ type OperationBis Operation
+ var x OperationBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "tags")
+ delete(x.Extensions, "summary")
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "operationId")
+ delete(x.Extensions, "parameters")
+ delete(x.Extensions, "requestBody")
+ delete(x.Extensions, "responses")
+ delete(x.Extensions, "callbacks")
+ delete(x.Extensions, "deprecated")
+ delete(x.Extensions, "security")
+ delete(x.Extensions, "servers")
+ delete(x.Extensions, "externalDocs")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *operation = Operation(x)
+ return nil
+}
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (operation Operation) JSONLookup(token string) (interface{}, error) {
+ switch token {
+ case "requestBody":
+ if operation.RequestBody != nil {
+ if operation.RequestBody.Ref != "" {
+ return &Ref{Ref: operation.RequestBody.Ref}, nil
+ }
+ return operation.RequestBody.Value, nil
+ }
+ case "tags":
+ return operation.Tags, nil
+ case "summary":
+ return operation.Summary, nil
+ case "description":
+ return operation.Description, nil
+ case "operationID":
+ return operation.OperationID, nil
+ case "parameters":
+ return operation.Parameters, nil
+ case "responses":
+ return operation.Responses, nil
+ case "callbacks":
+ return operation.Callbacks, nil
+ case "deprecated":
+ return operation.Deprecated, nil
+ case "security":
+ return operation.Security, nil
+ case "servers":
+ return operation.Servers, nil
+ case "externalDocs":
+ return operation.ExternalDocs, nil
+ }
+
+ v, _, err := jsonpointer.GetForToken(operation.Extensions, token)
+ return v, err
+}
+
+func (operation *Operation) AddParameter(p *Parameter) {
+ operation.Parameters = append(operation.Parameters, &ParameterRef{Value: p})
+}
+
+func (operation *Operation) AddResponse(status int, response *Response) {
+ code := "default"
+ if 0 < status && status < 1000 {
+ code = strconv.FormatInt(int64(status), 10)
+ }
+ if operation.Responses == nil {
+ operation.Responses = NewResponses()
+ }
+ operation.Responses.Set(code, &ResponseRef{Value: response})
+}
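+
+// exampleBuildOperationSketch is an illustrative sketch, not upstream API:
+// assembling an operation with the helpers above. The operation ID, query
+// parameter name, and status code are placeholder values; NewQueryParameter is
+// the parameter constructor defined alongside Parameter in this package.
+func exampleBuildOperationSketch() *Operation {
+ op := NewOperation()
+ op.OperationID = "listThings"
+ op.AddParameter(NewQueryParameter("limit"))
+ op.AddResponse(200, &Response{})
+ return op
+}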
+
+// Validate returns an error if Operation does not comply with the OpenAPI spec.
+func (operation *Operation) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if v := operation.Parameters; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return err
+ }
+ }
+
+ if v := operation.RequestBody; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return err
+ }
+ }
+
+ if v := operation.Responses; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return err
+ }
+ } else {
+ return errors.New("value of responses must be an object")
+ }
+
+ if v := operation.ExternalDocs; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return fmt.Errorf("invalid external docs: %w", err)
+ }
+ }
+
+ return validateExtensions(ctx, operation.Extensions)
+}
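
The vendored operation.go above gives the generator helpers such as AddParameter, AddResponse, and Validate. A minimal, hypothetical sketch of how they compose; the `getTeam`/`teamKey` names, the empty schema, and the printed output are illustrative assumptions, not part of this patch:

```go
package main

import (
	"context"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// Hypothetical operation; names are illustrative only.
	op := &openapi3.Operation{OperationID: "getTeam", Summary: "Fetch a single team"}

	// NewPathParameter marks the parameter as required, which path parameters must be.
	op.AddParameter(openapi3.NewPathParameter("teamKey").WithSchema(&openapi3.Schema{}))

	// AddResponse lazily initializes Responses and keys the entry by status code.
	op.AddResponse(200, openapi3.NewResponse().WithDescription("Team found"))

	// Validate fails if Responses is nil or any parameter is malformed.
	if err := op.Validate(context.Background()); err != nil {
		fmt.Println("invalid operation:", err)
		return
	}
	fmt.Println("valid operation:", op.OperationID)
}
```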
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/parameter.go b/vendor/github.com/getkin/kin-openapi/openapi3/parameter.go
new file mode 100644
index 00000000..f5a157de
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/parameter.go
@@ -0,0 +1,407 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "sort"
+ "strconv"
+
+ "github.com/go-openapi/jsonpointer"
+)
+
+// Parameters is specified by OpenAPI/Swagger 3.0 standard.
+type Parameters []*ParameterRef
+
+var _ jsonpointer.JSONPointable = (*Parameters)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (p Parameters) JSONLookup(token string) (interface{}, error) {
+ index, err := strconv.Atoi(token)
+ if err != nil {
+ return nil, err
+ }
+ if index < 0 || index >= len(p) {
+ return nil, fmt.Errorf("index %d out of bounds of array of length %d", index, len(p))
+ }
+
+ ref := p[index]
+ if ref != nil && ref.Ref != "" {
+ return &Ref{Ref: ref.Ref}, nil
+ }
+ return ref.Value, nil
+}
+
+func NewParameters() Parameters {
+ return make(Parameters, 0, 4)
+}
+
+func (parameters Parameters) GetByInAndName(in string, name string) *Parameter {
+ for _, item := range parameters {
+ if v := item.Value; v != nil {
+ if v.Name == name && v.In == in {
+ return v
+ }
+ }
+ }
+ return nil
+}
+
+// Validate returns an error if Parameters does not comply with the OpenAPI spec.
+func (parameters Parameters) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ dupes := make(map[string]struct{})
+ for _, parameterRef := range parameters {
+ if v := parameterRef.Value; v != nil {
+ key := v.In + ":" + v.Name
+ if _, ok := dupes[key]; ok {
+ return fmt.Errorf("more than one %q parameter has name %q", v.In, v.Name)
+ }
+ dupes[key] = struct{}{}
+ }
+
+ if err := parameterRef.Validate(ctx); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// Parameter is specified by OpenAPI/Swagger 3.0 standard.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#parameter-object
+type Parameter struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Name string `json:"name,omitempty" yaml:"name,omitempty"`
+ In string `json:"in,omitempty" yaml:"in,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Style string `json:"style,omitempty" yaml:"style,omitempty"`
+ Explode *bool `json:"explode,omitempty" yaml:"explode,omitempty"`
+ AllowEmptyValue bool `json:"allowEmptyValue,omitempty" yaml:"allowEmptyValue,omitempty"`
+ AllowReserved bool `json:"allowReserved,omitempty" yaml:"allowReserved,omitempty"`
+ Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"`
+ Required bool `json:"required,omitempty" yaml:"required,omitempty"`
+ Schema *SchemaRef `json:"schema,omitempty" yaml:"schema,omitempty"`
+ Example interface{} `json:"example,omitempty" yaml:"example,omitempty"`
+ Examples Examples `json:"examples,omitempty" yaml:"examples,omitempty"`
+ Content Content `json:"content,omitempty" yaml:"content,omitempty"`
+}
+
+var _ jsonpointer.JSONPointable = (*Parameter)(nil)
+
+const (
+ ParameterInPath = "path"
+ ParameterInQuery = "query"
+ ParameterInHeader = "header"
+ ParameterInCookie = "cookie"
+)
+
+func NewPathParameter(name string) *Parameter {
+ return &Parameter{
+ Name: name,
+ In: ParameterInPath,
+ Required: true,
+ }
+}
+
+func NewQueryParameter(name string) *Parameter {
+ return &Parameter{
+ Name: name,
+ In: ParameterInQuery,
+ }
+}
+
+func NewHeaderParameter(name string) *Parameter {
+ return &Parameter{
+ Name: name,
+ In: ParameterInHeader,
+ }
+}
+
+func NewCookieParameter(name string) *Parameter {
+ return &Parameter{
+ Name: name,
+ In: ParameterInCookie,
+ }
+}
+
+func (parameter *Parameter) WithDescription(value string) *Parameter {
+ parameter.Description = value
+ return parameter
+}
+
+func (parameter *Parameter) WithRequired(value bool) *Parameter {
+ parameter.Required = value
+ return parameter
+}
+
+func (parameter *Parameter) WithSchema(value *Schema) *Parameter {
+ if value == nil {
+ parameter.Schema = nil
+ } else {
+ parameter.Schema = &SchemaRef{
+ Value: value,
+ }
+ }
+ return parameter
+}
+
+// MarshalJSON returns the JSON encoding of Parameter.
+func (parameter Parameter) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 13+len(parameter.Extensions))
+ for k, v := range parameter.Extensions {
+ m[k] = v
+ }
+
+ if x := parameter.Name; x != "" {
+ m["name"] = x
+ }
+ if x := parameter.In; x != "" {
+ m["in"] = x
+ }
+ if x := parameter.Description; x != "" {
+ m["description"] = x
+ }
+ if x := parameter.Style; x != "" {
+ m["style"] = x
+ }
+ if x := parameter.Explode; x != nil {
+ m["explode"] = x
+ }
+ if x := parameter.AllowEmptyValue; x {
+ m["allowEmptyValue"] = x
+ }
+ if x := parameter.AllowReserved; x {
+ m["allowReserved"] = x
+ }
+ if x := parameter.Deprecated; x {
+ m["deprecated"] = x
+ }
+ if x := parameter.Required; x {
+ m["required"] = x
+ }
+ if x := parameter.Schema; x != nil {
+ m["schema"] = x
+ }
+ if x := parameter.Example; x != nil {
+ m["example"] = x
+ }
+ if x := parameter.Examples; len(x) != 0 {
+ m["examples"] = x
+ }
+ if x := parameter.Content; len(x) != 0 {
+ m["content"] = x
+ }
+
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Parameter to a copy of data.
+func (parameter *Parameter) UnmarshalJSON(data []byte) error {
+ type ParameterBis Parameter
+ var x ParameterBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+
+ delete(x.Extensions, "name")
+ delete(x.Extensions, "in")
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "style")
+ delete(x.Extensions, "explode")
+ delete(x.Extensions, "allowEmptyValue")
+ delete(x.Extensions, "allowReserved")
+ delete(x.Extensions, "deprecated")
+ delete(x.Extensions, "required")
+ delete(x.Extensions, "schema")
+ delete(x.Extensions, "example")
+ delete(x.Extensions, "examples")
+ delete(x.Extensions, "content")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+
+ *parameter = Parameter(x)
+ return nil
+}
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (parameter Parameter) JSONLookup(token string) (interface{}, error) {
+ switch token {
+ case "schema":
+ if parameter.Schema != nil {
+ if parameter.Schema.Ref != "" {
+ return &Ref{Ref: parameter.Schema.Ref}, nil
+ }
+ return parameter.Schema.Value, nil
+ }
+ case "name":
+ return parameter.Name, nil
+ case "in":
+ return parameter.In, nil
+ case "description":
+ return parameter.Description, nil
+ case "style":
+ return parameter.Style, nil
+ case "explode":
+ return parameter.Explode, nil
+ case "allowEmptyValue":
+ return parameter.AllowEmptyValue, nil
+ case "allowReserved":
+ return parameter.AllowReserved, nil
+ case "deprecated":
+ return parameter.Deprecated, nil
+ case "required":
+ return parameter.Required, nil
+ case "example":
+ return parameter.Example, nil
+ case "examples":
+ return parameter.Examples, nil
+ case "content":
+ return parameter.Content, nil
+ }
+
+ v, _, err := jsonpointer.GetForToken(parameter.Extensions, token)
+ return v, err
+}
+
+// SerializationMethod returns a parameter's serialization method.
+// When a parameter's serialization method is not defined, the default
+// serialization method for the parameter's location is returned.
+func (parameter *Parameter) SerializationMethod() (*SerializationMethod, error) {
+ switch parameter.In {
+ case ParameterInPath, ParameterInHeader:
+ style := parameter.Style
+ if style == "" {
+ style = SerializationSimple
+ }
+ explode := false
+ if parameter.Explode != nil {
+ explode = *parameter.Explode
+ }
+ return &SerializationMethod{Style: style, Explode: explode}, nil
+ case ParameterInQuery, ParameterInCookie:
+ style := parameter.Style
+ if style == "" {
+ style = SerializationForm
+ }
+ explode := true
+ if parameter.Explode != nil {
+ explode = *parameter.Explode
+ }
+ return &SerializationMethod{Style: style, Explode: explode}, nil
+ default:
+ return nil, fmt.Errorf("unexpected parameter's 'in': %q", parameter.In)
+ }
+}
+
+// Validate returns an error if Parameter does not comply with the OpenAPI spec.
+func (parameter *Parameter) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if parameter.Name == "" {
+ return errors.New("parameter name can't be blank")
+ }
+ in := parameter.In
+ switch in {
+ case
+ ParameterInPath,
+ ParameterInQuery,
+ ParameterInHeader,
+ ParameterInCookie:
+ default:
+ return fmt.Errorf("parameter can't have 'in' value %q", parameter.In)
+ }
+
+ if in == ParameterInPath && !parameter.Required {
+ return fmt.Errorf("path parameter %q must be required", parameter.Name)
+ }
+
+ // Validate a parameter's serialization method.
+ sm, err := parameter.SerializationMethod()
+ if err != nil {
+ return err
+ }
+ var smSupported bool
+ switch {
+ case parameter.In == ParameterInPath && sm.Style == SerializationSimple && !sm.Explode,
+ parameter.In == ParameterInPath && sm.Style == SerializationSimple && sm.Explode,
+ parameter.In == ParameterInPath && sm.Style == SerializationLabel && !sm.Explode,
+ parameter.In == ParameterInPath && sm.Style == SerializationLabel && sm.Explode,
+ parameter.In == ParameterInPath && sm.Style == SerializationMatrix && !sm.Explode,
+ parameter.In == ParameterInPath && sm.Style == SerializationMatrix && sm.Explode,
+
+ parameter.In == ParameterInQuery && sm.Style == SerializationForm && sm.Explode,
+ parameter.In == ParameterInQuery && sm.Style == SerializationForm && !sm.Explode,
+ parameter.In == ParameterInQuery && sm.Style == SerializationSpaceDelimited && sm.Explode,
+ parameter.In == ParameterInQuery && sm.Style == SerializationSpaceDelimited && !sm.Explode,
+ parameter.In == ParameterInQuery && sm.Style == SerializationPipeDelimited && sm.Explode,
+ parameter.In == ParameterInQuery && sm.Style == SerializationPipeDelimited && !sm.Explode,
+ parameter.In == ParameterInQuery && sm.Style == SerializationDeepObject && sm.Explode,
+
+ parameter.In == ParameterInHeader && sm.Style == SerializationSimple && !sm.Explode,
+ parameter.In == ParameterInHeader && sm.Style == SerializationSimple && sm.Explode,
+
+ parameter.In == ParameterInCookie && sm.Style == SerializationForm && !sm.Explode,
+ parameter.In == ParameterInCookie && sm.Style == SerializationForm && sm.Explode:
+ smSupported = true
+ }
+ if !smSupported {
+ e := fmt.Errorf("serialization method with style=%q and explode=%v is not supported by a %s parameter", sm.Style, sm.Explode, in)
+ return fmt.Errorf("parameter %q schema is invalid: %w", parameter.Name, e)
+ }
+
+ if (parameter.Schema == nil) == (len(parameter.Content) == 0) {
+ e := errors.New("parameter must contain exactly one of content and schema")
+ return fmt.Errorf("parameter %q schema is invalid: %w", parameter.Name, e)
+ }
+
+ if content := parameter.Content; content != nil {
+ e := errors.New("parameter content must only contain one entry")
+ if len(content) > 1 {
+ return fmt.Errorf("parameter %q content is invalid: %w", parameter.Name, e)
+ }
+
+ if err := content.Validate(ctx); err != nil {
+ return fmt.Errorf("parameter %q content is invalid: %w", parameter.Name, err)
+ }
+ }
+
+ if schema := parameter.Schema; schema != nil {
+ if err := schema.Validate(ctx); err != nil {
+ return fmt.Errorf("parameter %q schema is invalid: %w", parameter.Name, err)
+ }
+ if parameter.Example != nil && parameter.Examples != nil {
+ return fmt.Errorf("parameter %q example and examples are mutually exclusive", parameter.Name)
+ }
+
+ if vo := getValidationOptions(ctx); vo.examplesValidationDisabled {
+ return nil
+ }
+ if example := parameter.Example; example != nil {
+ if err := validateExampleValue(ctx, example, schema.Value); err != nil {
+ return fmt.Errorf("invalid example: %w", err)
+ }
+ } else if examples := parameter.Examples; examples != nil {
+ names := make([]string, 0, len(examples))
+ for name := range examples {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ for _, k := range names {
+ v := examples[k]
+ if err := v.Validate(ctx); err != nil {
+ return fmt.Errorf("%s: %w", k, err)
+ }
+ if err := validateExampleValue(ctx, v.Value.Value, schema.Value); err != nil {
+ return fmt.Errorf("%s: %w", k, err)
+ }
+ }
+ }
+ }
+
+ return validateExtensions(ctx, parameter.Extensions)
+}
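
A short sketch of the serialization defaults and validation rules defined in parameter.go; the parameter names and the commented-out expected error are illustrative assumptions only:

```go
package main

import (
	"context"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// Query parameters default to style=form, explode=true when Style/Explode are unset.
	q := openapi3.NewQueryParameter("expand").WithSchema(&openapi3.Schema{})
	sm, _ := q.SerializationMethod()
	fmt.Printf("query default: style=%s explode=%v\n", sm.Style, sm.Explode)

	// A path parameter that is not marked required fails Validate.
	p := (&openapi3.Parameter{Name: "teamKey", In: openapi3.ParameterInPath}).
		WithSchema(&openapi3.Schema{})
	if err := p.Validate(context.Background()); err != nil {
		fmt.Println("expected error:", err) // path parameter "teamKey" must be required
	}
}
```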
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/path_item.go b/vendor/github.com/getkin/kin-openapi/openapi3/path_item.go
new file mode 100644
index 00000000..e5dd0fb6
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/path_item.go
@@ -0,0 +1,239 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "sort"
+)
+
+// PathItem is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#path-item-object
+type PathItem struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Ref string `json:"$ref,omitempty" yaml:"$ref,omitempty"`
+ Summary string `json:"summary,omitempty" yaml:"summary,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Connect *Operation `json:"connect,omitempty" yaml:"connect,omitempty"`
+ Delete *Operation `json:"delete,omitempty" yaml:"delete,omitempty"`
+ Get *Operation `json:"get,omitempty" yaml:"get,omitempty"`
+ Head *Operation `json:"head,omitempty" yaml:"head,omitempty"`
+ Options *Operation `json:"options,omitempty" yaml:"options,omitempty"`
+ Patch *Operation `json:"patch,omitempty" yaml:"patch,omitempty"`
+ Post *Operation `json:"post,omitempty" yaml:"post,omitempty"`
+ Put *Operation `json:"put,omitempty" yaml:"put,omitempty"`
+ Trace *Operation `json:"trace,omitempty" yaml:"trace,omitempty"`
+ Servers Servers `json:"servers,omitempty" yaml:"servers,omitempty"`
+ Parameters Parameters `json:"parameters,omitempty" yaml:"parameters,omitempty"`
+}
+
+// MarshalJSON returns the JSON encoding of PathItem.
+func (pathItem PathItem) MarshalJSON() ([]byte, error) {
+ if ref := pathItem.Ref; ref != "" {
+ return json.Marshal(Ref{Ref: ref})
+ }
+
+ m := make(map[string]interface{}, 13+len(pathItem.Extensions))
+ for k, v := range pathItem.Extensions {
+ m[k] = v
+ }
+ if x := pathItem.Summary; x != "" {
+ m["summary"] = x
+ }
+ if x := pathItem.Description; x != "" {
+ m["description"] = x
+ }
+ if x := pathItem.Connect; x != nil {
+ m["connect"] = x
+ }
+ if x := pathItem.Delete; x != nil {
+ m["delete"] = x
+ }
+ if x := pathItem.Get; x != nil {
+ m["get"] = x
+ }
+ if x := pathItem.Head; x != nil {
+ m["head"] = x
+ }
+ if x := pathItem.Options; x != nil {
+ m["options"] = x
+ }
+ if x := pathItem.Patch; x != nil {
+ m["patch"] = x
+ }
+ if x := pathItem.Post; x != nil {
+ m["post"] = x
+ }
+ if x := pathItem.Put; x != nil {
+ m["put"] = x
+ }
+ if x := pathItem.Trace; x != nil {
+ m["trace"] = x
+ }
+ if x := pathItem.Servers; len(x) != 0 {
+ m["servers"] = x
+ }
+ if x := pathItem.Parameters; len(x) != 0 {
+ m["parameters"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets PathItem to a copy of data.
+func (pathItem *PathItem) UnmarshalJSON(data []byte) error {
+ type PathItemBis PathItem
+ var x PathItemBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "$ref")
+ delete(x.Extensions, "summary")
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "connect")
+ delete(x.Extensions, "delete")
+ delete(x.Extensions, "get")
+ delete(x.Extensions, "head")
+ delete(x.Extensions, "options")
+ delete(x.Extensions, "patch")
+ delete(x.Extensions, "post")
+ delete(x.Extensions, "put")
+ delete(x.Extensions, "trace")
+ delete(x.Extensions, "servers")
+ delete(x.Extensions, "parameters")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *pathItem = PathItem(x)
+ return nil
+}
+
+func (pathItem *PathItem) Operations() map[string]*Operation {
+ operations := make(map[string]*Operation)
+ if v := pathItem.Connect; v != nil {
+ operations[http.MethodConnect] = v
+ }
+ if v := pathItem.Delete; v != nil {
+ operations[http.MethodDelete] = v
+ }
+ if v := pathItem.Get; v != nil {
+ operations[http.MethodGet] = v
+ }
+ if v := pathItem.Head; v != nil {
+ operations[http.MethodHead] = v
+ }
+ if v := pathItem.Options; v != nil {
+ operations[http.MethodOptions] = v
+ }
+ if v := pathItem.Patch; v != nil {
+ operations[http.MethodPatch] = v
+ }
+ if v := pathItem.Post; v != nil {
+ operations[http.MethodPost] = v
+ }
+ if v := pathItem.Put; v != nil {
+ operations[http.MethodPut] = v
+ }
+ if v := pathItem.Trace; v != nil {
+ operations[http.MethodTrace] = v
+ }
+ return operations
+}
+
+func (pathItem *PathItem) GetOperation(method string) *Operation {
+ switch method {
+ case http.MethodConnect:
+ return pathItem.Connect
+ case http.MethodDelete:
+ return pathItem.Delete
+ case http.MethodGet:
+ return pathItem.Get
+ case http.MethodHead:
+ return pathItem.Head
+ case http.MethodOptions:
+ return pathItem.Options
+ case http.MethodPatch:
+ return pathItem.Patch
+ case http.MethodPost:
+ return pathItem.Post
+ case http.MethodPut:
+ return pathItem.Put
+ case http.MethodTrace:
+ return pathItem.Trace
+ default:
+ panic(fmt.Errorf("unsupported HTTP method %q", method))
+ }
+}
+
+func (pathItem *PathItem) SetOperation(method string, operation *Operation) {
+ switch method {
+ case http.MethodConnect:
+ pathItem.Connect = operation
+ case http.MethodDelete:
+ pathItem.Delete = operation
+ case http.MethodGet:
+ pathItem.Get = operation
+ case http.MethodHead:
+ pathItem.Head = operation
+ case http.MethodOptions:
+ pathItem.Options = operation
+ case http.MethodPatch:
+ pathItem.Patch = operation
+ case http.MethodPost:
+ pathItem.Post = operation
+ case http.MethodPut:
+ pathItem.Put = operation
+ case http.MethodTrace:
+ pathItem.Trace = operation
+ default:
+ panic(fmt.Errorf("unsupported HTTP method %q", method))
+ }
+}
+
+// Validate returns an error if PathItem does not comply with the OpenAPI spec.
+func (pathItem *PathItem) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ operations := pathItem.Operations()
+
+ methods := make([]string, 0, len(operations))
+ for method := range operations {
+ methods = append(methods, method)
+ }
+ sort.Strings(methods)
+ for _, method := range methods {
+ operation := operations[method]
+ if err := operation.Validate(ctx); err != nil {
+ return fmt.Errorf("invalid operation %s: %v", method, err)
+ }
+ }
+
+ if v := pathItem.Parameters; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return err
+ }
+ }
+
+ return validateExtensions(ctx, pathItem.Extensions)
+}
+
+// isEmpty was introduced in commit 546590b1
+func (pathItem *PathItem) isEmpty() bool {
+ // NOTE: ignores pathItem.Extensions
+ // NOTE: ignores pathItem.Ref
+ return pathItem.Summary == "" &&
+ pathItem.Description == "" &&
+ pathItem.Connect == nil &&
+ pathItem.Delete == nil &&
+ pathItem.Get == nil &&
+ pathItem.Head == nil &&
+ pathItem.Options == nil &&
+ pathItem.Patch == nil &&
+ pathItem.Post == nil &&
+ pathItem.Put == nil &&
+ pathItem.Trace == nil &&
+ len(pathItem.Servers) == 0 &&
+ len(pathItem.Parameters) == 0
+}
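
A hedged sketch of the PathItem accessors from path_item.go, which the generator relies on to enumerate operations per HTTP method; operation IDs shown here are illustrative assumptions:

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	item := &openapi3.PathItem{Summary: "Team collection"}

	// SetOperation/GetOperation are keyed by net/http method constants;
	// any other method string makes them panic.
	item.SetOperation(http.MethodGet, &openapi3.Operation{OperationID: "getTeams"})
	item.SetOperation(http.MethodPost, &openapi3.Operation{OperationID: "postTeam"})

	// Operations returns only the methods that are actually set.
	for method, op := range item.Operations() {
		fmt.Println(method, "->", op.OperationID)
	}
	fmt.Println("GET id:", item.GetOperation(http.MethodGet).OperationID)
}
```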
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/paths.go b/vendor/github.com/getkin/kin-openapi/openapi3/paths.go
new file mode 100644
index 00000000..daafe71c
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/paths.go
@@ -0,0 +1,283 @@
+package openapi3
+
+import (
+ "context"
+ "fmt"
+ "sort"
+ "strings"
+)
+
+// Paths is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#paths-object
+type Paths struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ m map[string]*PathItem
+}
+
+// NewPaths builds a paths object with path items in insertion order.
+func NewPaths(opts ...NewPathsOption) *Paths {
+ paths := NewPathsWithCapacity(len(opts))
+ for _, opt := range opts {
+ opt(paths)
+ }
+ return paths
+}
+
+// NewPathsOption describes options to NewPaths func
+type NewPathsOption func(*Paths)
+
+// WithPath adds a named path item
+func WithPath(path string, pathItem *PathItem) NewPathsOption {
+ return func(paths *Paths) {
+ if p := pathItem; p != nil && path != "" {
+ paths.Set(path, p)
+ }
+ }
+}
+
+// Validate returns an error if Paths does not comply with the OpenAPI spec.
+func (paths *Paths) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ normalizedPaths := make(map[string]string, paths.Len())
+
+ keys := make([]string, 0, paths.Len())
+ for key := range paths.Map() {
+ keys = append(keys, key)
+ }
+ sort.Strings(keys)
+ for _, path := range keys {
+ pathItem := paths.Value(path)
+ if path == "" || path[0] != '/' {
+ return fmt.Errorf("path %q does not start with a forward slash (/)", path)
+ }
+
+ if pathItem == nil {
+ pathItem = &PathItem{}
+ paths.Set(path, pathItem)
+ }
+
+ normalizedPath, _, varsInPath := normalizeTemplatedPath(path)
+ if oldPath, ok := normalizedPaths[normalizedPath]; ok {
+ return fmt.Errorf("conflicting paths %q and %q", path, oldPath)
+ }
+		normalizedPaths[normalizedPath] = path
+
+ var commonParams []string
+ for _, parameterRef := range pathItem.Parameters {
+ if parameterRef != nil {
+ if parameter := parameterRef.Value; parameter != nil && parameter.In == ParameterInPath {
+ commonParams = append(commonParams, parameter.Name)
+ }
+ }
+ }
+ operations := pathItem.Operations()
+ methods := make([]string, 0, len(operations))
+ for method := range operations {
+ methods = append(methods, method)
+ }
+ sort.Strings(methods)
+ for _, method := range methods {
+ operation := operations[method]
+ var setParams []string
+ for _, parameterRef := range operation.Parameters {
+ if parameterRef != nil {
+ if parameter := parameterRef.Value; parameter != nil && parameter.In == ParameterInPath {
+ setParams = append(setParams, parameter.Name)
+ }
+ }
+ }
+ if expected := len(setParams) + len(commonParams); expected != len(varsInPath) {
+ expected -= len(varsInPath)
+ if expected < 0 {
+ expected *= -1
+ }
+ missing := make(map[string]struct{}, expected)
+ definedParams := append(setParams, commonParams...)
+ for _, name := range definedParams {
+ if _, ok := varsInPath[name]; !ok {
+ missing[name] = struct{}{}
+ }
+ }
+ for name := range varsInPath {
+ got := false
+ for _, othername := range definedParams {
+ if othername == name {
+ got = true
+ break
+ }
+ }
+ if !got {
+ missing[name] = struct{}{}
+ }
+ }
+ if len(missing) != 0 {
+ missings := make([]string, 0, len(missing))
+ for name := range missing {
+ missings = append(missings, name)
+ }
+ return fmt.Errorf("operation %s %s must define exactly all path parameters (missing: %v)", method, path, missings)
+ }
+ }
+ }
+
+ if err := pathItem.Validate(ctx); err != nil {
+ return fmt.Errorf("invalid path %s: %v", path, err)
+ }
+ }
+
+ if err := paths.validateUniqueOperationIDs(); err != nil {
+ return err
+ }
+
+ return validateExtensions(ctx, paths.Extensions)
+}
+
+// InMatchingOrder returns paths in the order they are matched against URLs.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#paths-object
+// When matching URLs, concrete (non-templated) paths would be matched
+// before their templated counterparts.
+func (paths *Paths) InMatchingOrder() []string {
+ // NOTE: sorting by number of variables ASC then by descending lexicographical
+ // order seems to be a good heuristic.
+ if paths.Len() == 0 {
+ return nil
+ }
+
+ vars := make(map[int][]string)
+ max := 0
+ for path := range paths.Map() {
+ count := strings.Count(path, "}")
+ vars[count] = append(vars[count], path)
+ if count > max {
+ max = count
+ }
+ }
+
+ ordered := make([]string, 0, paths.Len())
+ for c := 0; c <= max; c++ {
+ if ps, ok := vars[c]; ok {
+ sort.Sort(sort.Reverse(sort.StringSlice(ps)))
+ ordered = append(ordered, ps...)
+ }
+ }
+ return ordered
+}
+
+// Find returns a path that matches the key.
+//
+// The method ignores differences in template variable names (except possible "*" suffix).
+//
+// For example:
+//
+//	paths := openapi3.NewPaths(
+//		openapi3.WithPath("/person/{personName}", &openapi3.PathItem{}),
+//	)
+//	pathItem := paths.Find("/person/{name}")
+//
+// would return the correct path item.
+func (paths *Paths) Find(key string) *PathItem {
+	// Try to access the map directly first.
+ pathItem := paths.Value(key)
+ if pathItem != nil {
+ return pathItem
+ }
+
+ normalizedPath, expected, _ := normalizeTemplatedPath(key)
+ for path, pathItem := range paths.Map() {
+ pathNormalized, got, _ := normalizeTemplatedPath(path)
+ if got == expected && pathNormalized == normalizedPath {
+ return pathItem
+ }
+ }
+ return nil
+}
+
+func (paths *Paths) validateUniqueOperationIDs() error {
+ operationIDs := make(map[string]string)
+ for urlPath, pathItem := range paths.Map() {
+ if pathItem == nil {
+ continue
+ }
+ for httpMethod, operation := range pathItem.Operations() {
+ if operation == nil || operation.OperationID == "" {
+ continue
+ }
+ endpoint := httpMethod + " " + urlPath
+ if endpointDup, ok := operationIDs[operation.OperationID]; ok {
+				if endpoint > endpointDup { // Make the error message more deterministic; useful for tests.
+ endpoint, endpointDup = endpointDup, endpoint
+ }
+ return fmt.Errorf("operations %q and %q have the same operation id %q",
+ endpoint, endpointDup, operation.OperationID)
+ }
+ operationIDs[operation.OperationID] = endpoint
+ }
+ }
+ return nil
+}
+
+// MarshalYAML implements the yaml.Marshaler interface for gopkg.in/yaml.
+func (paths *Paths) MarshalYAML() (any, error) {
+ res := make(map[string]any, len(paths.Extensions)+len(paths.m))
+
+ for k, v := range paths.Extensions {
+ res[k] = v
+ }
+
+ for k, v := range paths.m {
+ res[k] = v
+ }
+
+ return res, nil
+}
+
+func normalizeTemplatedPath(path string) (string, uint, map[string]struct{}) {
+ if strings.IndexByte(path, '{') < 0 {
+ return path, 0, nil
+ }
+
+ var buffTpl strings.Builder
+ buffTpl.Grow(len(path))
+
+ var (
+ cc rune
+ count uint
+ isVariable bool
+ vars = make(map[string]struct{})
+ buffVar strings.Builder
+ )
+ for i, c := range path {
+ if isVariable {
+ if c == '}' {
+ // End path variable
+ isVariable = false
+
+ vars[buffVar.String()] = struct{}{}
+ buffVar = strings.Builder{}
+
+ // First append possible '*' before this character
+ // The character '}' will be appended
+ if i > 0 && cc == '*' {
+ buffTpl.WriteRune(cc)
+ }
+ } else {
+ buffVar.WriteRune(c)
+ continue
+ }
+
+ } else if c == '{' {
+ // Begin path variable
+ isVariable = true
+
+ // The character '{' will be appended
+ count++
+ }
+
+ // Append the character
+ buffTpl.WriteRune(c)
+ cc = c
+ }
+ return buffTpl.String(), count, vars
+}
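
The paths.go file documents that Find ignores template variable names and that concrete paths match before templated ones. A minimal sketch of both behaviors, assuming the hypothetical `/teams` routes below (not part of this patch):

```go
package main

import (
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	paths := openapi3.NewPaths(
		openapi3.WithPath("/teams", &openapi3.PathItem{Summary: "collection"}),
		openapi3.WithPath("/teams/{teamKey}", &openapi3.PathItem{Summary: "single team"}),
	)

	// Find normalizes template variable names, so a different placeholder
	// still resolves to the same path item.
	if item := paths.Find("/teams/{key}"); item != nil {
		fmt.Println("found:", item.Summary) // single team
	}

	// Concrete paths sort ahead of templated ones for URL matching.
	fmt.Println(paths.InMatchingOrder()) // [/teams /teams/{teamKey}]
}
```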
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/ref.go b/vendor/github.com/getkin/kin-openapi/openapi3/ref.go
new file mode 100644
index 00000000..a937de4a
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/ref.go
@@ -0,0 +1,7 @@
+package openapi3
+
+// Ref is specified by OpenAPI/Swagger 3.0 standard.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#reference-object
+type Ref struct {
+ Ref string `json:"$ref" yaml:"$ref"`
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/refs.go b/vendor/github.com/getkin/kin-openapi/openapi3/refs.go
new file mode 100644
index 00000000..a7e1e368
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/refs.go
@@ -0,0 +1,713 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "sort"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/perimeterx/marshmallow"
+)
+
+// CallbackRef represents either a Callback or a $ref to a Callback.
+// When serializing and both fields are set, Ref is preferred over Value.
+type CallbackRef struct {
+ Ref string
+ Value *Callback
+ extra []string
+}
+
+var _ jsonpointer.JSONPointable = (*CallbackRef)(nil)
+
+func (x *CallbackRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil }
+
+// MarshalYAML returns the YAML encoding of CallbackRef.
+func (x CallbackRef) MarshalYAML() (interface{}, error) {
+ if ref := x.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ }
+ return x.Value, nil
+}
+
+// MarshalJSON returns the JSON encoding of CallbackRef.
+func (x CallbackRef) MarshalJSON() ([]byte, error) {
+ if ref := x.Ref; ref != "" {
+ return json.Marshal(Ref{Ref: ref})
+ }
+ return json.Marshal(x.Value)
+}
+
+// UnmarshalJSON sets CallbackRef to a copy of data.
+func (x *CallbackRef) UnmarshalJSON(data []byte) error {
+ var refOnly Ref
+ if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" {
+ x.Ref = refOnly.Ref
+ if len(extra) != 0 {
+ x.extra = make([]string, 0, len(extra))
+ for key := range extra {
+ x.extra = append(x.extra, key)
+ }
+ sort.Strings(x.extra)
+ }
+ return nil
+ }
+ return json.Unmarshal(data, &x.Value)
+}
+
+// Validate returns an error if CallbackRef does not comply with the OpenAPI spec.
+func (x *CallbackRef) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+ if extra := x.extra; len(extra) != 0 {
+ extras := make([]string, 0, len(extra))
+ allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed
+ for _, ex := range extra {
+ if allowed != nil {
+ if _, ok := allowed[ex]; ok {
+ continue
+ }
+ }
+ extras = append(extras, ex)
+ }
+ if len(extras) != 0 {
+ return fmt.Errorf("extra sibling fields: %+v", extras)
+ }
+ }
+ if v := x.Value; v != nil {
+ return v.Validate(ctx)
+ }
+ return foundUnresolvedRef(x.Ref)
+}
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (x *CallbackRef) JSONLookup(token string) (interface{}, error) {
+ if token == "$ref" {
+ return x.Ref, nil
+ }
+ ptr, _, err := jsonpointer.GetForToken(x.Value, token)
+ return ptr, err
+}
+
+// ExampleRef represents either a Example or a $ref to a Example.
+// When serializing and both fields are set, Ref is preferred over Value.
+type ExampleRef struct {
+ Ref string
+ Value *Example
+ extra []string
+}
+
+var _ jsonpointer.JSONPointable = (*ExampleRef)(nil)
+
+func (x *ExampleRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil }
+
+// MarshalYAML returns the YAML encoding of ExampleRef.
+func (x ExampleRef) MarshalYAML() (interface{}, error) {
+ if ref := x.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ }
+ return x.Value, nil
+}
+
+// MarshalJSON returns the JSON encoding of ExampleRef.
+func (x ExampleRef) MarshalJSON() ([]byte, error) {
+ if ref := x.Ref; ref != "" {
+ return json.Marshal(Ref{Ref: ref})
+ }
+ return x.Value.MarshalJSON()
+}
+
+// UnmarshalJSON sets ExampleRef to a copy of data.
+func (x *ExampleRef) UnmarshalJSON(data []byte) error {
+ var refOnly Ref
+ if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" {
+ x.Ref = refOnly.Ref
+ if len(extra) != 0 {
+ x.extra = make([]string, 0, len(extra))
+ for key := range extra {
+ x.extra = append(x.extra, key)
+ }
+ sort.Strings(x.extra)
+ }
+ return nil
+ }
+ return json.Unmarshal(data, &x.Value)
+}
+
+// Validate returns an error if ExampleRef does not comply with the OpenAPI spec.
+func (x *ExampleRef) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+ if extra := x.extra; len(extra) != 0 {
+ extras := make([]string, 0, len(extra))
+ allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed
+ for _, ex := range extra {
+ if allowed != nil {
+ if _, ok := allowed[ex]; ok {
+ continue
+ }
+ }
+ extras = append(extras, ex)
+ }
+ if len(extras) != 0 {
+ return fmt.Errorf("extra sibling fields: %+v", extras)
+ }
+ }
+ if v := x.Value; v != nil {
+ return v.Validate(ctx)
+ }
+ return foundUnresolvedRef(x.Ref)
+}
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (x *ExampleRef) JSONLookup(token string) (interface{}, error) {
+ if token == "$ref" {
+ return x.Ref, nil
+ }
+ ptr, _, err := jsonpointer.GetForToken(x.Value, token)
+ return ptr, err
+}
+
+// HeaderRef represents either a Header or a $ref to a Header.
+// When serializing and both fields are set, Ref is preferred over Value.
+type HeaderRef struct {
+ Ref string
+ Value *Header
+ extra []string
+}
+
+var _ jsonpointer.JSONPointable = (*HeaderRef)(nil)
+
+func (x *HeaderRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil }
+
+// MarshalYAML returns the YAML encoding of HeaderRef.
+func (x HeaderRef) MarshalYAML() (interface{}, error) {
+ if ref := x.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ }
+ return x.Value, nil
+}
+
+// MarshalJSON returns the JSON encoding of HeaderRef.
+func (x HeaderRef) MarshalJSON() ([]byte, error) {
+ if ref := x.Ref; ref != "" {
+ return json.Marshal(Ref{Ref: ref})
+ }
+ return x.Value.MarshalJSON()
+}
+
+// UnmarshalJSON sets HeaderRef to a copy of data.
+func (x *HeaderRef) UnmarshalJSON(data []byte) error {
+ var refOnly Ref
+ if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" {
+ x.Ref = refOnly.Ref
+ if len(extra) != 0 {
+ x.extra = make([]string, 0, len(extra))
+ for key := range extra {
+ x.extra = append(x.extra, key)
+ }
+ sort.Strings(x.extra)
+ }
+ return nil
+ }
+ return json.Unmarshal(data, &x.Value)
+}
+
+// Validate returns an error if HeaderRef does not comply with the OpenAPI spec.
+func (x *HeaderRef) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+ if extra := x.extra; len(extra) != 0 {
+ extras := make([]string, 0, len(extra))
+ allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed
+ for _, ex := range extra {
+ if allowed != nil {
+ if _, ok := allowed[ex]; ok {
+ continue
+ }
+ }
+ extras = append(extras, ex)
+ }
+ if len(extras) != 0 {
+ return fmt.Errorf("extra sibling fields: %+v", extras)
+ }
+ }
+ if v := x.Value; v != nil {
+ return v.Validate(ctx)
+ }
+ return foundUnresolvedRef(x.Ref)
+}
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (x *HeaderRef) JSONLookup(token string) (interface{}, error) {
+ if token == "$ref" {
+ return x.Ref, nil
+ }
+ ptr, _, err := jsonpointer.GetForToken(x.Value, token)
+ return ptr, err
+}
+
+// LinkRef represents either a Link or a $ref to a Link.
+// When serializing and both fields are set, Ref is preferred over Value.
+type LinkRef struct {
+ Ref string
+ Value *Link
+ extra []string
+}
+
+var _ jsonpointer.JSONPointable = (*LinkRef)(nil)
+
+func (x *LinkRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil }
+
+// MarshalYAML returns the YAML encoding of LinkRef.
+func (x LinkRef) MarshalYAML() (interface{}, error) {
+ if ref := x.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ }
+ return x.Value, nil
+}
+
+// MarshalJSON returns the JSON encoding of LinkRef.
+func (x LinkRef) MarshalJSON() ([]byte, error) {
+ if ref := x.Ref; ref != "" {
+ return json.Marshal(Ref{Ref: ref})
+ }
+ return x.Value.MarshalJSON()
+}
+
+// UnmarshalJSON sets LinkRef to a copy of data.
+func (x *LinkRef) UnmarshalJSON(data []byte) error {
+ var refOnly Ref
+ if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" {
+ x.Ref = refOnly.Ref
+ if len(extra) != 0 {
+ x.extra = make([]string, 0, len(extra))
+ for key := range extra {
+ x.extra = append(x.extra, key)
+ }
+ sort.Strings(x.extra)
+ }
+ return nil
+ }
+ return json.Unmarshal(data, &x.Value)
+}
+
+// Validate returns an error if LinkRef does not comply with the OpenAPI spec.
+func (x *LinkRef) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+ if extra := x.extra; len(extra) != 0 {
+ extras := make([]string, 0, len(extra))
+ allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed
+ for _, ex := range extra {
+ if allowed != nil {
+ if _, ok := allowed[ex]; ok {
+ continue
+ }
+ }
+ extras = append(extras, ex)
+ }
+ if len(extras) != 0 {
+ return fmt.Errorf("extra sibling fields: %+v", extras)
+ }
+ }
+ if v := x.Value; v != nil {
+ return v.Validate(ctx)
+ }
+ return foundUnresolvedRef(x.Ref)
+}
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (x *LinkRef) JSONLookup(token string) (interface{}, error) {
+ if token == "$ref" {
+ return x.Ref, nil
+ }
+ ptr, _, err := jsonpointer.GetForToken(x.Value, token)
+ return ptr, err
+}
+
+// ParameterRef represents either a Parameter or a $ref to a Parameter.
+// When serializing and both fields are set, Ref is preferred over Value.
+type ParameterRef struct {
+ Ref string
+ Value *Parameter
+ extra []string
+}
+
+var _ jsonpointer.JSONPointable = (*ParameterRef)(nil)
+
+func (x *ParameterRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil }
+
+// MarshalYAML returns the YAML encoding of ParameterRef.
+func (x ParameterRef) MarshalYAML() (interface{}, error) {
+ if ref := x.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ }
+ return x.Value, nil
+}
+
+// MarshalJSON returns the JSON encoding of ParameterRef.
+func (x ParameterRef) MarshalJSON() ([]byte, error) {
+ if ref := x.Ref; ref != "" {
+ return json.Marshal(Ref{Ref: ref})
+ }
+ return x.Value.MarshalJSON()
+}
+
+// UnmarshalJSON sets ParameterRef to a copy of data.
+func (x *ParameterRef) UnmarshalJSON(data []byte) error {
+ var refOnly Ref
+ if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" {
+ x.Ref = refOnly.Ref
+ if len(extra) != 0 {
+ x.extra = make([]string, 0, len(extra))
+ for key := range extra {
+ x.extra = append(x.extra, key)
+ }
+ sort.Strings(x.extra)
+ }
+ return nil
+ }
+ return json.Unmarshal(data, &x.Value)
+}
+
+// Validate returns an error if ParameterRef does not comply with the OpenAPI spec.
+func (x *ParameterRef) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+ if extra := x.extra; len(extra) != 0 {
+ extras := make([]string, 0, len(extra))
+ allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed
+ for _, ex := range extra {
+ if allowed != nil {
+ if _, ok := allowed[ex]; ok {
+ continue
+ }
+ }
+ extras = append(extras, ex)
+ }
+ if len(extras) != 0 {
+ return fmt.Errorf("extra sibling fields: %+v", extras)
+ }
+ }
+ if v := x.Value; v != nil {
+ return v.Validate(ctx)
+ }
+ return foundUnresolvedRef(x.Ref)
+}
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (x *ParameterRef) JSONLookup(token string) (interface{}, error) {
+ if token == "$ref" {
+ return x.Ref, nil
+ }
+ ptr, _, err := jsonpointer.GetForToken(x.Value, token)
+ return ptr, err
+}
+
+// RequestBodyRef represents either a RequestBody or a $ref to a RequestBody.
+// When serializing and both fields are set, Ref is preferred over Value.
+type RequestBodyRef struct {
+ Ref string
+ Value *RequestBody
+ extra []string
+}
+
+var _ jsonpointer.JSONPointable = (*RequestBodyRef)(nil)
+
+func (x *RequestBodyRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil }
+
+// MarshalYAML returns the YAML encoding of RequestBodyRef.
+func (x RequestBodyRef) MarshalYAML() (interface{}, error) {
+ if ref := x.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ }
+ return x.Value, nil
+}
+
+// MarshalJSON returns the JSON encoding of RequestBodyRef.
+func (x RequestBodyRef) MarshalJSON() ([]byte, error) {
+ if ref := x.Ref; ref != "" {
+ return json.Marshal(Ref{Ref: ref})
+ }
+ return x.Value.MarshalJSON()
+}
+
+// UnmarshalJSON sets RequestBodyRef to a copy of data.
+func (x *RequestBodyRef) UnmarshalJSON(data []byte) error {
+ var refOnly Ref
+ if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" {
+ x.Ref = refOnly.Ref
+ if len(extra) != 0 {
+ x.extra = make([]string, 0, len(extra))
+ for key := range extra {
+ x.extra = append(x.extra, key)
+ }
+ sort.Strings(x.extra)
+ }
+ return nil
+ }
+ return json.Unmarshal(data, &x.Value)
+}
+
+// Validate returns an error if RequestBodyRef does not comply with the OpenAPI spec.
+func (x *RequestBodyRef) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+ if extra := x.extra; len(extra) != 0 {
+ extras := make([]string, 0, len(extra))
+ allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed
+ for _, ex := range extra {
+ if allowed != nil {
+ if _, ok := allowed[ex]; ok {
+ continue
+ }
+ }
+ extras = append(extras, ex)
+ }
+ if len(extras) != 0 {
+ return fmt.Errorf("extra sibling fields: %+v", extras)
+ }
+ }
+ if v := x.Value; v != nil {
+ return v.Validate(ctx)
+ }
+ return foundUnresolvedRef(x.Ref)
+}
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (x *RequestBodyRef) JSONLookup(token string) (interface{}, error) {
+ if token == "$ref" {
+ return x.Ref, nil
+ }
+ ptr, _, err := jsonpointer.GetForToken(x.Value, token)
+ return ptr, err
+}
+
+// ResponseRef represents either a Response or a $ref to a Response.
+// When serializing and both fields are set, Ref is preferred over Value.
+type ResponseRef struct {
+ Ref string
+ Value *Response
+ extra []string
+}
+
+var _ jsonpointer.JSONPointable = (*ResponseRef)(nil)
+
+func (x *ResponseRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil }
+
+// MarshalYAML returns the YAML encoding of ResponseRef.
+func (x ResponseRef) MarshalYAML() (interface{}, error) {
+ if ref := x.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ }
+ return x.Value, nil
+}
+
+// MarshalJSON returns the JSON encoding of ResponseRef.
+func (x ResponseRef) MarshalJSON() ([]byte, error) {
+ if ref := x.Ref; ref != "" {
+ return json.Marshal(Ref{Ref: ref})
+ }
+ return x.Value.MarshalJSON()
+}
+
+// UnmarshalJSON sets ResponseRef to a copy of data.
+func (x *ResponseRef) UnmarshalJSON(data []byte) error {
+ var refOnly Ref
+ if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" {
+ x.Ref = refOnly.Ref
+ if len(extra) != 0 {
+ x.extra = make([]string, 0, len(extra))
+ for key := range extra {
+ x.extra = append(x.extra, key)
+ }
+ sort.Strings(x.extra)
+ }
+ return nil
+ }
+ return json.Unmarshal(data, &x.Value)
+}
+
+// Validate returns an error if ResponseRef does not comply with the OpenAPI spec.
+func (x *ResponseRef) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+ if extra := x.extra; len(extra) != 0 {
+ extras := make([]string, 0, len(extra))
+ allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed
+ for _, ex := range extra {
+ if allowed != nil {
+ if _, ok := allowed[ex]; ok {
+ continue
+ }
+ }
+ extras = append(extras, ex)
+ }
+ if len(extras) != 0 {
+ return fmt.Errorf("extra sibling fields: %+v", extras)
+ }
+ }
+ if v := x.Value; v != nil {
+ return v.Validate(ctx)
+ }
+ return foundUnresolvedRef(x.Ref)
+}
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (x *ResponseRef) JSONLookup(token string) (interface{}, error) {
+ if token == "$ref" {
+ return x.Ref, nil
+ }
+ ptr, _, err := jsonpointer.GetForToken(x.Value, token)
+ return ptr, err
+}
+
+// SchemaRef represents either a Schema or a $ref to a Schema.
+// When serializing and both fields are set, Ref is preferred over Value.
+type SchemaRef struct {
+ Ref string
+ Value *Schema
+ extra []string
+}
+
+var _ jsonpointer.JSONPointable = (*SchemaRef)(nil)
+
+func (x *SchemaRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil }
+
+// MarshalYAML returns the YAML encoding of SchemaRef.
+func (x SchemaRef) MarshalYAML() (interface{}, error) {
+ if ref := x.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ }
+ return x.Value, nil
+}
+
+// MarshalJSON returns the JSON encoding of SchemaRef.
+func (x SchemaRef) MarshalJSON() ([]byte, error) {
+ if ref := x.Ref; ref != "" {
+ return json.Marshal(Ref{Ref: ref})
+ }
+ return x.Value.MarshalJSON()
+}
+
+// UnmarshalJSON sets SchemaRef to a copy of data.
+func (x *SchemaRef) UnmarshalJSON(data []byte) error {
+ var refOnly Ref
+ if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" {
+ x.Ref = refOnly.Ref
+ if len(extra) != 0 {
+ x.extra = make([]string, 0, len(extra))
+ for key := range extra {
+ x.extra = append(x.extra, key)
+ }
+ sort.Strings(x.extra)
+ }
+ return nil
+ }
+ return json.Unmarshal(data, &x.Value)
+}
+
+// Validate returns an error if SchemaRef does not comply with the OpenAPI spec.
+func (x *SchemaRef) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+ if extra := x.extra; len(extra) != 0 {
+ extras := make([]string, 0, len(extra))
+ allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed
+ for _, ex := range extra {
+ if allowed != nil {
+ if _, ok := allowed[ex]; ok {
+ continue
+ }
+ }
+ extras = append(extras, ex)
+ }
+ if len(extras) != 0 {
+ return fmt.Errorf("extra sibling fields: %+v", extras)
+ }
+ }
+ if v := x.Value; v != nil {
+ return v.Validate(ctx)
+ }
+ return foundUnresolvedRef(x.Ref)
+}
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (x *SchemaRef) JSONLookup(token string) (interface{}, error) {
+ if token == "$ref" {
+ return x.Ref, nil
+ }
+ ptr, _, err := jsonpointer.GetForToken(x.Value, token)
+ return ptr, err
+}
+
+// SecuritySchemeRef represents either a SecurityScheme or a $ref to a SecurityScheme.
+// When serializing and both fields are set, Ref is preferred over Value.
+type SecuritySchemeRef struct {
+ Ref string
+ Value *SecurityScheme
+ extra []string
+}
+
+var _ jsonpointer.JSONPointable = (*SecuritySchemeRef)(nil)
+
+func (x *SecuritySchemeRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil }
+
+// MarshalYAML returns the YAML encoding of SecuritySchemeRef.
+func (x SecuritySchemeRef) MarshalYAML() (interface{}, error) {
+ if ref := x.Ref; ref != "" {
+ return &Ref{Ref: ref}, nil
+ }
+ return x.Value, nil
+}
+
+// MarshalJSON returns the JSON encoding of SecuritySchemeRef.
+func (x SecuritySchemeRef) MarshalJSON() ([]byte, error) {
+ if ref := x.Ref; ref != "" {
+ return json.Marshal(Ref{Ref: ref})
+ }
+ return x.Value.MarshalJSON()
+}
+
+// UnmarshalJSON sets SecuritySchemeRef to a copy of data.
+func (x *SecuritySchemeRef) UnmarshalJSON(data []byte) error {
+ var refOnly Ref
+ if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" {
+ x.Ref = refOnly.Ref
+ if len(extra) != 0 {
+ x.extra = make([]string, 0, len(extra))
+ for key := range extra {
+ x.extra = append(x.extra, key)
+ }
+ sort.Strings(x.extra)
+ }
+ return nil
+ }
+ return json.Unmarshal(data, &x.Value)
+}
+
+// Validate returns an error if SecuritySchemeRef does not comply with the OpenAPI spec.
+func (x *SecuritySchemeRef) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+ if extra := x.extra; len(extra) != 0 {
+ extras := make([]string, 0, len(extra))
+ allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed
+ for _, ex := range extra {
+ if allowed != nil {
+ if _, ok := allowed[ex]; ok {
+ continue
+ }
+ }
+ extras = append(extras, ex)
+ }
+ if len(extras) != 0 {
+ return fmt.Errorf("extra sibling fields: %+v", extras)
+ }
+ }
+ if v := x.Value; v != nil {
+ return v.Validate(ctx)
+ }
+ return foundUnresolvedRef(x.Ref)
+}
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (x *SecuritySchemeRef) JSONLookup(token string) (interface{}, error) {
+ if token == "$ref" {
+ return x.Ref, nil
+ }
+ ptr, _, err := jsonpointer.GetForToken(x.Value, token)
+ return ptr, err
+}
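
Every *Ref type in refs.go documents that, when both fields are set, Ref is preferred over Value during serialization. A small sketch of that round-trip behavior using SchemaRef; the component name and sample JSON are illustrative assumptions:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// When both fields are set, the $ref wins during serialization.
	ref := openapi3.SchemaRef{
		Ref:   "#/components/schemas/Team", // illustrative component name
		Value: &openapi3.Schema{},
	}
	out, _ := json.Marshal(ref)
	fmt.Println(string(out)) // {"$ref":"#/components/schemas/Team"}

	// Unmarshaling an object without "$ref" populates Value instead of Ref.
	var sr openapi3.SchemaRef
	_ = json.Unmarshal([]byte(`{"description":"a team"}`), &sr)
	fmt.Println(sr.Ref == "", sr.Value != nil) // true true
}
```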
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/request_body.go b/vendor/github.com/getkin/kin-openapi/openapi3/request_body.go
new file mode 100644
index 00000000..acd2d0e8
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/request_body.go
@@ -0,0 +1,129 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+)
+
+// RequestBody is specified by OpenAPI/Swagger 3.0 standard.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#request-body-object
+type RequestBody struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Required bool `json:"required,omitempty" yaml:"required,omitempty"`
+ Content Content `json:"content" yaml:"content"`
+}
+
+func NewRequestBody() *RequestBody {
+ return &RequestBody{}
+}
+
+func (requestBody *RequestBody) WithDescription(value string) *RequestBody {
+ requestBody.Description = value
+ return requestBody
+}
+
+func (requestBody *RequestBody) WithRequired(value bool) *RequestBody {
+ requestBody.Required = value
+ return requestBody
+}
+
+func (requestBody *RequestBody) WithContent(content Content) *RequestBody {
+ requestBody.Content = content
+ return requestBody
+}
+
+func (requestBody *RequestBody) WithSchemaRef(value *SchemaRef, consumes []string) *RequestBody {
+ requestBody.Content = NewContentWithSchemaRef(value, consumes)
+ return requestBody
+}
+
+func (requestBody *RequestBody) WithSchema(value *Schema, consumes []string) *RequestBody {
+ requestBody.Content = NewContentWithSchema(value, consumes)
+ return requestBody
+}
+
+func (requestBody *RequestBody) WithJSONSchemaRef(value *SchemaRef) *RequestBody {
+ requestBody.Content = NewContentWithJSONSchemaRef(value)
+ return requestBody
+}
+
+func (requestBody *RequestBody) WithJSONSchema(value *Schema) *RequestBody {
+ requestBody.Content = NewContentWithJSONSchema(value)
+ return requestBody
+}
+
+func (requestBody *RequestBody) WithFormDataSchemaRef(value *SchemaRef) *RequestBody {
+ requestBody.Content = NewContentWithFormDataSchemaRef(value)
+ return requestBody
+}
+
+func (requestBody *RequestBody) WithFormDataSchema(value *Schema) *RequestBody {
+ requestBody.Content = NewContentWithFormDataSchema(value)
+ return requestBody
+}
+
+func (requestBody *RequestBody) GetMediaType(mediaType string) *MediaType {
+ m := requestBody.Content
+ if m == nil {
+ return nil
+ }
+ return m[mediaType]
+}
+
+// MarshalJSON returns the JSON encoding of RequestBody.
+func (requestBody RequestBody) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 3+len(requestBody.Extensions))
+ for k, v := range requestBody.Extensions {
+ m[k] = v
+ }
+ if x := requestBody.Description; x != "" {
+ m["description"] = requestBody.Description
+ }
+ if x := requestBody.Required; x {
+ m["required"] = x
+ }
+ if x := requestBody.Content; true {
+ m["content"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets RequestBody to a copy of data.
+func (requestBody *RequestBody) UnmarshalJSON(data []byte) error {
+ type RequestBodyBis RequestBody
+ var x RequestBodyBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "required")
+ delete(x.Extensions, "content")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *requestBody = RequestBody(x)
+ return nil
+}
+
+// Validate returns an error if RequestBody does not comply with the OpenAPI spec.
+func (requestBody *RequestBody) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if requestBody.Content == nil {
+ return errors.New("content of the request body is required")
+ }
+
+ if vo := getValidationOptions(ctx); !vo.examplesValidationDisabled {
+ vo.examplesValidationAsReq, vo.examplesValidationAsRes = true, false
+ }
+
+ if err := requestBody.Content.Validate(ctx); err != nil {
+ return err
+ }
+
+ return validateExtensions(ctx, requestBody.Extensions)
+}
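
A brief sketch of the RequestBody builders and the "content is required" rule enforced by Validate above; the description text and empty schema are illustrative assumptions:

```go
package main

import (
	"context"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// The With* builders return the receiver, so they chain.
	body := openapi3.NewRequestBody().
		WithDescription("Team to create").
		WithRequired(true).
		WithJSONSchema(&openapi3.Schema{})
	fmt.Println("media types:", len(body.Content), "required:", body.Required)

	// Content is mandatory: an empty RequestBody fails validation.
	if err := openapi3.NewRequestBody().Validate(context.Background()); err != nil {
		fmt.Println("expected error:", err) // content of the request body is required
	}
}
```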
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/response.go b/vendor/github.com/getkin/kin-openapi/openapi3/response.go
new file mode 100644
index 00000000..f69c237b
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/response.go
@@ -0,0 +1,233 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "sort"
+ "strconv"
+)
+
+// Responses is specified by OpenAPI/Swagger 3.0 standard.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#responses-object
+type Responses struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ m map[string]*ResponseRef
+}
+
+// NewResponses builds a responses object with response objects in insertion order.
+// Given no arguments, NewResponses returns a valid responses object containing a default match-all response.
+func NewResponses(opts ...NewResponsesOption) *Responses {
+ if len(opts) == 0 {
+ return NewResponses(WithName("default", NewResponse().WithDescription("")))
+ }
+ responses := NewResponsesWithCapacity(len(opts))
+ for _, opt := range opts {
+ opt(responses)
+ }
+ return responses
+}
+
+// NewResponsesOption describes options to NewResponses func
+type NewResponsesOption func(*Responses)
+
+// WithStatus adds a status code keyed ResponseRef
+func WithStatus(status int, responseRef *ResponseRef) NewResponsesOption {
+ return func(responses *Responses) {
+ if r := responseRef; r != nil {
+ code := strconv.FormatInt(int64(status), 10)
+ responses.Set(code, r)
+ }
+ }
+}
+
+// WithName adds a name-keyed Response
+func WithName(name string, response *Response) NewResponsesOption {
+ return func(responses *Responses) {
+ if r := response; r != nil && name != "" {
+ responses.Set(name, &ResponseRef{Value: r})
+ }
+ }
+}
+
+// Default returns the default response
+func (responses *Responses) Default() *ResponseRef {
+ return responses.Value("default")
+}
+
+// Status returns a ResponseRef for the given status code.
+// If an exact match is not found, the patterned range field derived from the
+// first digit is checked instead (e.g. 201 falls back to 2XX).
+// See https://spec.openapis.org/oas/v3.0.3#patterned-fields-0
+func (responses *Responses) Status(status int) *ResponseRef {
+ st := strconv.FormatInt(int64(status), 10)
+ if rref := responses.Value(st); rref != nil {
+ return rref
+ }
+ if 99 < status && status < 600 {
+ st = string(st[0]) + "XX"
+ switch st {
+ case "1XX", "2XX", "3XX", "4XX", "5XX":
+ return responses.Value(st)
+ }
+ }
+ return nil
+}
+
+// Validate returns an error if Responses does not comply with the OpenAPI spec.
+func (responses *Responses) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if responses.Len() == 0 {
+ return errors.New("the responses object MUST contain at least one response code")
+ }
+
+ keys := make([]string, 0, responses.Len())
+ for key := range responses.Map() {
+ keys = append(keys, key)
+ }
+ sort.Strings(keys)
+ for _, key := range keys {
+ v := responses.Value(key)
+ if err := v.Validate(ctx); err != nil {
+ return err
+ }
+ }
+
+ return validateExtensions(ctx, responses.Extensions)
+}
+
+// MarshalYAML implements the yaml.Marshaler interface for gopkg.in/yaml.
+func (responses *Responses) MarshalYAML() (any, error) {
+ res := make(map[string]any, len(responses.Extensions)+len(responses.m))
+
+ for k, v := range responses.Extensions {
+ res[k] = v
+ }
+
+ for k, v := range responses.m {
+ res[k] = v
+ }
+
+ return res, nil
+}
+
+// Response is specified by OpenAPI/Swagger 3.0 standard.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#response-object
+type Response struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Description *string `json:"description,omitempty" yaml:"description,omitempty"`
+ Headers Headers `json:"headers,omitempty" yaml:"headers,omitempty"`
+ Content Content `json:"content,omitempty" yaml:"content,omitempty"`
+ Links Links `json:"links,omitempty" yaml:"links,omitempty"`
+}
+
+func NewResponse() *Response {
+ return &Response{}
+}
+
+func (response *Response) WithDescription(value string) *Response {
+ response.Description = &value
+ return response
+}
+
+func (response *Response) WithContent(content Content) *Response {
+ response.Content = content
+ return response
+}
+
+func (response *Response) WithJSONSchema(schema *Schema) *Response {
+ response.Content = NewContentWithJSONSchema(schema)
+ return response
+}
+
+func (response *Response) WithJSONSchemaRef(schema *SchemaRef) *Response {
+ response.Content = NewContentWithJSONSchemaRef(schema)
+ return response
+}
+
+// MarshalJSON returns the JSON encoding of Response.
+func (response Response) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 4+len(response.Extensions))
+ for k, v := range response.Extensions {
+ m[k] = v
+ }
+ if x := response.Description; x != nil {
+ m["description"] = x
+ }
+ if x := response.Headers; len(x) != 0 {
+ m["headers"] = x
+ }
+ if x := response.Content; len(x) != 0 {
+ m["content"] = x
+ }
+ if x := response.Links; len(x) != 0 {
+ m["links"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Response to a copy of data.
+func (response *Response) UnmarshalJSON(data []byte) error {
+ type ResponseBis Response
+ var x ResponseBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "headers")
+ delete(x.Extensions, "content")
+ delete(x.Extensions, "links")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *response = Response(x)
+ return nil
+}
+
+// Validate returns an error if Response does not comply with the OpenAPI spec.
+func (response *Response) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if response.Description == nil {
+ return errors.New("a short description of the response is required")
+ }
+ if vo := getValidationOptions(ctx); !vo.examplesValidationDisabled {
+ vo.examplesValidationAsReq, vo.examplesValidationAsRes = false, true
+ }
+
+ if content := response.Content; content != nil {
+ if err := content.Validate(ctx); err != nil {
+ return err
+ }
+ }
+
+ headers := make([]string, 0, len(response.Headers))
+ for name := range response.Headers {
+ headers = append(headers, name)
+ }
+ sort.Strings(headers)
+ for _, name := range headers {
+ header := response.Headers[name]
+ if err := header.Validate(ctx); err != nil {
+ return err
+ }
+ }
+
+ links := make([]string, 0, len(response.Links))
+ for name := range response.Links {
+ links = append(links, name)
+ }
+ sort.Strings(links)
+ for _, name := range links {
+ link := response.Links[name]
+ if err := link.Validate(ctx); err != nil {
+ return err
+ }
+ }
+
+ return validateExtensions(ctx, response.Extensions)
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/schema.go b/vendor/github.com/getkin/kin-openapi/openapi3/schema.go
new file mode 100644
index 00000000..ae28afef
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/schema.go
@@ -0,0 +1,2237 @@
+package openapi3
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "math"
+ "math/big"
+ "reflect"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "unicode/utf16"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/mohae/deepcopy"
+)
+
+const (
+ TypeArray = "array"
+ TypeBoolean = "boolean"
+ TypeInteger = "integer"
+ TypeNumber = "number"
+ TypeObject = "object"
+ TypeString = "string"
+ TypeNull = "null"
+
+ // constants for integer formats
+ formatMinInt32 = float64(math.MinInt32)
+ formatMaxInt32 = float64(math.MaxInt32)
+ formatMinInt64 = float64(math.MinInt64)
+ formatMaxInt64 = float64(math.MaxInt64)
+)
+
+var (
+ // SchemaErrorDetailsDisabled disables printing of details about schema errors.
+ SchemaErrorDetailsDisabled = false
+
+ errSchema = errors.New("input does not match the schema")
+
+ // ErrOneOfConflict is the SchemaError Origin when data matches more than one oneOf schema
+ ErrOneOfConflict = errors.New("input matches more than one oneOf schemas")
+
+ // ErrSchemaInputNaN may be returned when validating a number
+ ErrSchemaInputNaN = errors.New("floating point NaN is not allowed")
+ // ErrSchemaInputInf may be returned when validating a number
+ ErrSchemaInputInf = errors.New("floating point Inf is not allowed")
+
+ compiledPatterns sync.Map
+)
+
+// NewSchemaRef simply builds a SchemaRef
+func NewSchemaRef(ref string, value *Schema) *SchemaRef {
+ return &SchemaRef{
+ Ref: ref,
+ Value: value,
+ }
+}
+
+type SchemaRefs []*SchemaRef
+
+var _ jsonpointer.JSONPointable = (*SchemaRefs)(nil)
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (s SchemaRefs) JSONLookup(token string) (interface{}, error) {
+ i, err := strconv.ParseUint(token, 10, 64)
+ if err != nil {
+ return nil, err
+ }
+
+ if i >= uint64(len(s)) {
+ return nil, fmt.Errorf("index out of range: %d", i)
+ }
+
+ ref := s[i]
+
+ if ref == nil || ref.Ref != "" {
+ return &Ref{Ref: ref.Ref}, nil
+ }
+ return ref.Value, nil
+}
+
+// Schema is specified by OpenAPI/Swagger 3.0 standard.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#schema-object
+type Schema struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ OneOf SchemaRefs `json:"oneOf,omitempty" yaml:"oneOf,omitempty"`
+ AnyOf SchemaRefs `json:"anyOf,omitempty" yaml:"anyOf,omitempty"`
+ AllOf SchemaRefs `json:"allOf,omitempty" yaml:"allOf,omitempty"`
+ Not *SchemaRef `json:"not,omitempty" yaml:"not,omitempty"`
+ Type *Types `json:"type,omitempty" yaml:"type,omitempty"`
+ Title string `json:"title,omitempty" yaml:"title,omitempty"`
+ Format string `json:"format,omitempty" yaml:"format,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Enum []interface{} `json:"enum,omitempty" yaml:"enum,omitempty"`
+ Default interface{} `json:"default,omitempty" yaml:"default,omitempty"`
+ Example interface{} `json:"example,omitempty" yaml:"example,omitempty"`
+ ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"`
+
+ // Array-related, here for struct compactness
+ UniqueItems bool `json:"uniqueItems,omitempty" yaml:"uniqueItems,omitempty"`
+ // Number-related, here for struct compactness
+ ExclusiveMin bool `json:"exclusiveMinimum,omitempty" yaml:"exclusiveMinimum,omitempty"`
+ ExclusiveMax bool `json:"exclusiveMaximum,omitempty" yaml:"exclusiveMaximum,omitempty"`
+ // Properties
+ Nullable bool `json:"nullable,omitempty" yaml:"nullable,omitempty"`
+ ReadOnly bool `json:"readOnly,omitempty" yaml:"readOnly,omitempty"`
+ WriteOnly bool `json:"writeOnly,omitempty" yaml:"writeOnly,omitempty"`
+ AllowEmptyValue bool `json:"allowEmptyValue,omitempty" yaml:"allowEmptyValue,omitempty"`
+ Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"`
+ XML *XML `json:"xml,omitempty" yaml:"xml,omitempty"`
+
+ // Number
+ Min *float64 `json:"minimum,omitempty" yaml:"minimum,omitempty"`
+ Max *float64 `json:"maximum,omitempty" yaml:"maximum,omitempty"`
+ MultipleOf *float64 `json:"multipleOf,omitempty" yaml:"multipleOf,omitempty"`
+
+ // String
+ MinLength uint64 `json:"minLength,omitempty" yaml:"minLength,omitempty"`
+ MaxLength *uint64 `json:"maxLength,omitempty" yaml:"maxLength,omitempty"`
+ Pattern string `json:"pattern,omitempty" yaml:"pattern,omitempty"`
+
+ // Array
+ MinItems uint64 `json:"minItems,omitempty" yaml:"minItems,omitempty"`
+ MaxItems *uint64 `json:"maxItems,omitempty" yaml:"maxItems,omitempty"`
+ Items *SchemaRef `json:"items,omitempty" yaml:"items,omitempty"`
+
+ // Object
+ Required []string `json:"required,omitempty" yaml:"required,omitempty"`
+ Properties Schemas `json:"properties,omitempty" yaml:"properties,omitempty"`
+ MinProps uint64 `json:"minProperties,omitempty" yaml:"minProperties,omitempty"`
+ MaxProps *uint64 `json:"maxProperties,omitempty" yaml:"maxProperties,omitempty"`
+ AdditionalProperties AdditionalProperties `json:"additionalProperties,omitempty" yaml:"additionalProperties,omitempty"`
+ Discriminator *Discriminator `json:"discriminator,omitempty" yaml:"discriminator,omitempty"`
+}
+
+type Types []string
+
+func (types *Types) Is(typ string) bool {
+ return types != nil && len(*types) == 1 && (*types)[0] == typ
+}
+
+func (types *Types) Slice() []string {
+ if types == nil {
+ return nil
+ }
+ return *types
+}
+
+func (pTypes *Types) Includes(typ string) bool {
+ if pTypes == nil {
+ return false
+ }
+ types := *pTypes
+ for _, candidate := range types {
+ if candidate == typ {
+ return true
+ }
+ }
+ return false
+}
+
+func (types *Types) Permits(typ string) bool {
+ if types == nil {
+ return true
+ }
+ return types.Includes(typ)
+}
+
+func (pTypes *Types) MarshalJSON() ([]byte, error) {
+ x, err := pTypes.MarshalYAML()
+ if err != nil {
+ return nil, err
+ }
+ return json.Marshal(x)
+}
+
+func (pTypes *Types) MarshalYAML() (interface{}, error) {
+ if pTypes == nil {
+ return nil, nil
+ }
+ types := *pTypes
+ switch len(types) {
+ case 0:
+ return nil, nil
+ case 1:
+ return types[0], nil
+ default:
+ return []string(types), nil
+ }
+}
+
+func (types *Types) UnmarshalJSON(data []byte) error {
+ var strings []string
+ if err := json.Unmarshal(data, &strings); err != nil {
+ var s string
+ if err := json.Unmarshal(data, &s); err != nil {
+ return unmarshalError(err)
+ }
+ strings = []string{s}
+ }
+ *types = strings
+ return nil
+}
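+
+// exampleTypesSemantics is an illustrative sketch, not part of the upstream
+// kin-openapi file: it contrasts Is, Includes and Permits on the Types helper
+// above. A nil *Types (no "type" keyword) permits every type, while Is only
+// matches a single-element list. The function name is hypothetical.
+func exampleTypesSemantics() bool {
+ t := &Types{TypeString, TypeNull}
+ exactlyString := t.Is(TypeString) // false: Is requires exactly one entry
+ listsNull := t.Includes(TypeNull) // true: "null" is listed
+ var unset *Types // no "type" keyword at all
+ return !exactlyString && listsNull && unset.Permits(TypeObject) // true
+}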
+
+type AdditionalProperties struct {
+ Has *bool
+ Schema *SchemaRef
+}
+
+// MarshalYAML returns the YAML encoding of AdditionalProperties.
+func (addProps AdditionalProperties) MarshalYAML() (interface{}, error) {
+ if x := addProps.Has; x != nil {
+ if *x {
+ return true, nil
+ }
+ return false, nil
+ }
+ if x := addProps.Schema; x != nil {
+ return x.Value, nil
+ }
+ return nil, nil
+}
+
+// MarshalJSON returns the JSON encoding of AdditionalProperties.
+func (addProps AdditionalProperties) MarshalJSON() ([]byte, error) {
+ if x := addProps.Has; x != nil {
+ if *x {
+ return []byte("true"), nil
+ }
+ return []byte("false"), nil
+ }
+ if x := addProps.Schema; x != nil {
+ return json.Marshal(x)
+ }
+ return nil, nil
+}
+
+// UnmarshalJSON sets AdditionalProperties to a copy of data.
+func (addProps *AdditionalProperties) UnmarshalJSON(data []byte) error {
+ var x interface{}
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ switch y := x.(type) {
+ case nil:
+ case bool:
+ addProps.Has = &y
+ case map[string]interface{}:
+ if len(y) == 0 {
+ addProps.Schema = &SchemaRef{Value: &Schema{}}
+ } else {
+ buf := new(bytes.Buffer)
+ json.NewEncoder(buf).Encode(y)
+ if err := json.NewDecoder(buf).Decode(&addProps.Schema); err != nil {
+ return err
+ }
+ }
+ default:
+ return errors.New("cannot unmarshal additionalProperties: value must be either a schema object or a boolean")
+ }
+ return nil
+}
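+
+// exampleAdditionalProperties is an illustrative sketch, not part of the upstream
+// kin-openapi file: additionalProperties is either a boolean or a schema, and the
+// two forms are mutually exclusive (Schema.validate rejects setting both). The
+// function name is hypothetical.
+func exampleAdditionalProperties() (boolForm, schemaForm AdditionalProperties) {
+ boolForm = AdditionalProperties{Has: BoolPtr(false)} // "additionalProperties": false
+ schemaForm = AdditionalProperties{Schema: NewStringSchema().NewRef()} // a typed schema
+ return boolForm, schemaForm
+}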
+
+var _ jsonpointer.JSONPointable = (*Schema)(nil)
+
+func NewSchema() *Schema {
+ return &Schema{}
+}
+
+// MarshalJSON returns the JSON encoding of Schema.
+func (schema Schema) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 36+len(schema.Extensions))
+ for k, v := range schema.Extensions {
+ m[k] = v
+ }
+
+ if x := schema.OneOf; len(x) != 0 {
+ m["oneOf"] = x
+ }
+ if x := schema.AnyOf; len(x) != 0 {
+ m["anyOf"] = x
+ }
+ if x := schema.AllOf; len(x) != 0 {
+ m["allOf"] = x
+ }
+ if x := schema.Not; x != nil {
+ m["not"] = x
+ }
+ if x := schema.Type; x != nil {
+ m["type"] = x
+ }
+ if x := schema.Title; len(x) != 0 {
+ m["title"] = x
+ }
+ if x := schema.Format; len(x) != 0 {
+ m["format"] = x
+ }
+ if x := schema.Description; len(x) != 0 {
+ m["description"] = x
+ }
+ if x := schema.Enum; len(x) != 0 {
+ m["enum"] = x
+ }
+ if x := schema.Default; x != nil {
+ m["default"] = x
+ }
+ if x := schema.Example; x != nil {
+ m["example"] = x
+ }
+ if x := schema.ExternalDocs; x != nil {
+ m["externalDocs"] = x
+ }
+
+ // Array-related
+ if x := schema.UniqueItems; x {
+ m["uniqueItems"] = x
+ }
+ // Number-related
+ if x := schema.ExclusiveMin; x {
+ m["exclusiveMinimum"] = x
+ }
+ if x := schema.ExclusiveMax; x {
+ m["exclusiveMaximum"] = x
+ }
+ // Properties
+ if x := schema.Nullable; x {
+ m["nullable"] = x
+ }
+ if x := schema.ReadOnly; x {
+ m["readOnly"] = x
+ }
+ if x := schema.WriteOnly; x {
+ m["writeOnly"] = x
+ }
+ if x := schema.AllowEmptyValue; x {
+ m["allowEmptyValue"] = x
+ }
+ if x := schema.Deprecated; x {
+ m["deprecated"] = x
+ }
+ if x := schema.XML; x != nil {
+ m["xml"] = x
+ }
+
+ // Number
+ if x := schema.Min; x != nil {
+ m["minimum"] = x
+ }
+ if x := schema.Max; x != nil {
+ m["maximum"] = x
+ }
+ if x := schema.MultipleOf; x != nil {
+ m["multipleOf"] = x
+ }
+
+ // String
+ if x := schema.MinLength; x != 0 {
+ m["minLength"] = x
+ }
+ if x := schema.MaxLength; x != nil {
+ m["maxLength"] = x
+ }
+ if x := schema.Pattern; x != "" {
+ m["pattern"] = x
+ }
+
+ // Array
+ if x := schema.MinItems; x != 0 {
+ m["minItems"] = x
+ }
+ if x := schema.MaxItems; x != nil {
+ m["maxItems"] = x
+ }
+ if x := schema.Items; x != nil {
+ m["items"] = x
+ }
+
+ // Object
+ if x := schema.Required; len(x) != 0 {
+ m["required"] = x
+ }
+ if x := schema.Properties; len(x) != 0 {
+ m["properties"] = x
+ }
+ if x := schema.MinProps; x != 0 {
+ m["minProperties"] = x
+ }
+ if x := schema.MaxProps; x != nil {
+ m["maxProperties"] = x
+ }
+ if x := schema.AdditionalProperties; x.Has != nil || x.Schema != nil {
+ m["additionalProperties"] = &x
+ }
+ if x := schema.Discriminator; x != nil {
+ m["discriminator"] = x
+ }
+
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Schema to a copy of data.
+func (schema *Schema) UnmarshalJSON(data []byte) error {
+ type SchemaBis Schema
+ var x SchemaBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+
+ delete(x.Extensions, "oneOf")
+ delete(x.Extensions, "anyOf")
+ delete(x.Extensions, "allOf")
+ delete(x.Extensions, "not")
+ delete(x.Extensions, "type")
+ delete(x.Extensions, "title")
+ delete(x.Extensions, "format")
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "enum")
+ delete(x.Extensions, "default")
+ delete(x.Extensions, "example")
+ delete(x.Extensions, "externalDocs")
+
+ // Array-related
+ delete(x.Extensions, "uniqueItems")
+ // Number-related
+ delete(x.Extensions, "exclusiveMinimum")
+ delete(x.Extensions, "exclusiveMaximum")
+ // Properties
+ delete(x.Extensions, "nullable")
+ delete(x.Extensions, "readOnly")
+ delete(x.Extensions, "writeOnly")
+ delete(x.Extensions, "allowEmptyValue")
+ delete(x.Extensions, "deprecated")
+ delete(x.Extensions, "xml")
+
+ // Number
+ delete(x.Extensions, "minimum")
+ delete(x.Extensions, "maximum")
+ delete(x.Extensions, "multipleOf")
+
+ // String
+ delete(x.Extensions, "minLength")
+ delete(x.Extensions, "maxLength")
+ delete(x.Extensions, "pattern")
+
+ // Array
+ delete(x.Extensions, "minItems")
+ delete(x.Extensions, "maxItems")
+ delete(x.Extensions, "items")
+
+ // Object
+ delete(x.Extensions, "required")
+ delete(x.Extensions, "properties")
+ delete(x.Extensions, "minProperties")
+ delete(x.Extensions, "maxProperties")
+ delete(x.Extensions, "additionalProperties")
+ delete(x.Extensions, "discriminator")
+
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+
+ *schema = Schema(x)
+
+ if schema.Format == "date" {
+ // This is a fix for: https://github.com/getkin/kin-openapi/issues/697
+ if eg, ok := schema.Example.(string); ok {
+ schema.Example = strings.TrimSuffix(eg, "T00:00:00Z")
+ }
+ }
+ return nil
+}
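+
+// exampleSchemaExtensionsRoundTrip is an illustrative sketch, not part of the
+// upstream kin-openapi file: after UnmarshalJSON, known keywords populate the
+// struct fields while unknown "x-" keys are kept in Extensions and re-emitted by
+// MarshalJSON. The function name is hypothetical.
+func exampleSchemaExtensionsRoundTrip() (*Schema, error) {
+ var s Schema
+ data := []byte(`{"type":"string","minLength":1,"x-internal":true}`)
+ if err := json.Unmarshal(data, &s); err != nil {
+ return nil, err
+ }
+ // Now s.Type.Is(TypeString), s.MinLength == 1 and s.Extensions["x-internal"] == true.
+ return &s, nil
+}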
+
+// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable
+func (schema Schema) JSONLookup(token string) (interface{}, error) {
+ switch token {
+ case "additionalProperties":
+ if addProps := schema.AdditionalProperties.Has; addProps != nil {
+ return *addProps, nil
+ }
+ if addProps := schema.AdditionalProperties.Schema; addProps != nil {
+ if addProps.Ref != "" {
+ return &Ref{Ref: addProps.Ref}, nil
+ }
+ return addProps.Value, nil
+ }
+ case "not":
+ if schema.Not != nil {
+ if schema.Not.Ref != "" {
+ return &Ref{Ref: schema.Not.Ref}, nil
+ }
+ return schema.Not.Value, nil
+ }
+ case "items":
+ if schema.Items != nil {
+ if schema.Items.Ref != "" {
+ return &Ref{Ref: schema.Items.Ref}, nil
+ }
+ return schema.Items.Value, nil
+ }
+ case "oneOf":
+ return schema.OneOf, nil
+ case "anyOf":
+ return schema.AnyOf, nil
+ case "allOf":
+ return schema.AllOf, nil
+ case "type":
+ return schema.Type, nil
+ case "title":
+ return schema.Title, nil
+ case "format":
+ return schema.Format, nil
+ case "description":
+ return schema.Description, nil
+ case "enum":
+ return schema.Enum, nil
+ case "default":
+ return schema.Default, nil
+ case "example":
+ return schema.Example, nil
+ case "externalDocs":
+ return schema.ExternalDocs, nil
+ case "uniqueItems":
+ return schema.UniqueItems, nil
+ case "exclusiveMin":
+ return schema.ExclusiveMin, nil
+ case "exclusiveMax":
+ return schema.ExclusiveMax, nil
+ case "nullable":
+ return schema.Nullable, nil
+ case "readOnly":
+ return schema.ReadOnly, nil
+ case "writeOnly":
+ return schema.WriteOnly, nil
+ case "allowEmptyValue":
+ return schema.AllowEmptyValue, nil
+ case "xml":
+ return schema.XML, nil
+ case "deprecated":
+ return schema.Deprecated, nil
+ case "min":
+ return schema.Min, nil
+ case "max":
+ return schema.Max, nil
+ case "multipleOf":
+ return schema.MultipleOf, nil
+ case "minLength":
+ return schema.MinLength, nil
+ case "maxLength":
+ return schema.MaxLength, nil
+ case "pattern":
+ return schema.Pattern, nil
+ case "minItems":
+ return schema.MinItems, nil
+ case "maxItems":
+ return schema.MaxItems, nil
+ case "required":
+ return schema.Required, nil
+ case "properties":
+ return schema.Properties, nil
+ case "minProps":
+ return schema.MinProps, nil
+ case "maxProps":
+ return schema.MaxProps, nil
+ case "discriminator":
+ return schema.Discriminator, nil
+ }
+
+ v, _, err := jsonpointer.GetForToken(schema.Extensions, token)
+ return v, err
+}
+
+func (schema *Schema) NewRef() *SchemaRef {
+ return &SchemaRef{
+ Value: schema,
+ }
+}
+
+func NewOneOfSchema(schemas ...*Schema) *Schema {
+ refs := make([]*SchemaRef, 0, len(schemas))
+ for _, schema := range schemas {
+ refs = append(refs, &SchemaRef{Value: schema})
+ }
+ return &Schema{
+ OneOf: refs,
+ }
+}
+
+func NewAnyOfSchema(schemas ...*Schema) *Schema {
+ refs := make([]*SchemaRef, 0, len(schemas))
+ for _, schema := range schemas {
+ refs = append(refs, &SchemaRef{Value: schema})
+ }
+ return &Schema{
+ AnyOf: refs,
+ }
+}
+
+func NewAllOfSchema(schemas ...*Schema) *Schema {
+ refs := make([]*SchemaRef, 0, len(schemas))
+ for _, schema := range schemas {
+ refs = append(refs, &SchemaRef{Value: schema})
+ }
+ return &Schema{
+ AllOf: refs,
+ }
+}
+
+func NewBoolSchema() *Schema {
+ return &Schema{
+ Type: &Types{TypeBoolean},
+ }
+}
+
+func NewFloat64Schema() *Schema {
+ return &Schema{
+ Type: &Types{TypeNumber},
+ }
+}
+
+func NewIntegerSchema() *Schema {
+ return &Schema{
+ Type: &Types{TypeInteger},
+ }
+}
+
+func NewInt32Schema() *Schema {
+ return &Schema{
+ Type: &Types{TypeInteger},
+ Format: "int32",
+ }
+}
+
+func NewInt64Schema() *Schema {
+ return &Schema{
+ Type: &Types{TypeInteger},
+ Format: "int64",
+ }
+}
+
+func NewStringSchema() *Schema {
+ return &Schema{
+ Type: &Types{TypeString},
+ }
+}
+
+func NewDateTimeSchema() *Schema {
+ return &Schema{
+ Type: &Types{TypeString},
+ Format: "date-time",
+ }
+}
+
+func NewUUIDSchema() *Schema {
+ return &Schema{
+ Type: &Types{TypeString},
+ Format: "uuid",
+ }
+}
+
+func NewBytesSchema() *Schema {
+ return &Schema{
+ Type: &Types{TypeString},
+ Format: "byte",
+ }
+}
+
+func NewArraySchema() *Schema {
+ return &Schema{
+ Type: &Types{TypeArray},
+ }
+}
+
+func NewObjectSchema() *Schema {
+ return &Schema{
+ Type: &Types{TypeObject},
+ Properties: make(Schemas),
+ }
+}
+
+func (schema *Schema) WithNullable() *Schema {
+ schema.Nullable = true
+ return schema
+}
+
+func (schema *Schema) WithMin(value float64) *Schema {
+ schema.Min = &value
+ return schema
+}
+
+func (schema *Schema) WithMax(value float64) *Schema {
+ schema.Max = &value
+ return schema
+}
+
+func (schema *Schema) WithExclusiveMin(value bool) *Schema {
+ schema.ExclusiveMin = value
+ return schema
+}
+
+func (schema *Schema) WithExclusiveMax(value bool) *Schema {
+ schema.ExclusiveMax = value
+ return schema
+}
+
+func (schema *Schema) WithEnum(values ...interface{}) *Schema {
+ schema.Enum = values
+ return schema
+}
+
+func (schema *Schema) WithDefault(defaultValue interface{}) *Schema {
+ schema.Default = defaultValue
+ return schema
+}
+
+func (schema *Schema) WithFormat(value string) *Schema {
+ schema.Format = value
+ return schema
+}
+
+func (schema *Schema) WithLength(i int64) *Schema {
+ n := uint64(i)
+ schema.MinLength = n
+ schema.MaxLength = &n
+ return schema
+}
+
+func (schema *Schema) WithMinLength(i int64) *Schema {
+ n := uint64(i)
+ schema.MinLength = n
+ return schema
+}
+
+func (schema *Schema) WithMaxLength(i int64) *Schema {
+ n := uint64(i)
+ schema.MaxLength = &n
+ return schema
+}
+
+func (schema *Schema) WithLengthDecodedBase64(i int64) *Schema {
+ n := uint64(i)
+ v := (n*8 + 5) / 6
+ schema.MinLength = v
+ schema.MaxLength = &v
+ return schema
+}
+
+func (schema *Schema) WithMinLengthDecodedBase64(i int64) *Schema {
+ n := uint64(i)
+ schema.MinLength = (n*8 + 5) / 6
+ return schema
+}
+
+func (schema *Schema) WithMaxLengthDecodedBase64(i int64) *Schema {
+ n := uint64(i)
+ v := (n*8 + 5) / 6
+ schema.MaxLength = &v
+ return schema
+}
+
+func (schema *Schema) WithPattern(pattern string) *Schema {
+ schema.Pattern = pattern
+ return schema
+}
+
+func (schema *Schema) WithItems(value *Schema) *Schema {
+ schema.Items = &SchemaRef{
+ Value: value,
+ }
+ return schema
+}
+
+func (schema *Schema) WithMinItems(i int64) *Schema {
+ n := uint64(i)
+ schema.MinItems = n
+ return schema
+}
+
+func (schema *Schema) WithMaxItems(i int64) *Schema {
+ n := uint64(i)
+ schema.MaxItems = &n
+ return schema
+}
+
+func (schema *Schema) WithUniqueItems(unique bool) *Schema {
+ schema.UniqueItems = unique
+ return schema
+}
+
+func (schema *Schema) WithProperty(name string, propertySchema *Schema) *Schema {
+ return schema.WithPropertyRef(name, &SchemaRef{
+ Value: propertySchema,
+ })
+}
+
+func (schema *Schema) WithPropertyRef(name string, ref *SchemaRef) *Schema {
+ properties := schema.Properties
+ if properties == nil {
+ properties = make(Schemas)
+ schema.Properties = properties
+ }
+ properties[name] = ref
+ return schema
+}
+
+func (schema *Schema) WithProperties(properties map[string]*Schema) *Schema {
+ result := make(Schemas, len(properties))
+ for k, v := range properties {
+ result[k] = &SchemaRef{
+ Value: v,
+ }
+ }
+ schema.Properties = result
+ return schema
+}
+
+func (schema *Schema) WithRequired(required []string) *Schema {
+ schema.Required = required
+ return schema
+}
+
+func (schema *Schema) WithMinProperties(i int64) *Schema {
+ n := uint64(i)
+ schema.MinProps = n
+ return schema
+}
+
+func (schema *Schema) WithMaxProperties(i int64) *Schema {
+ n := uint64(i)
+ schema.MaxProps = &n
+ return schema
+}
+
+func (schema *Schema) WithAnyAdditionalProperties() *Schema {
+ schema.AdditionalProperties = AdditionalProperties{Has: BoolPtr(true)}
+ return schema
+}
+
+func (schema *Schema) WithoutAdditionalProperties() *Schema {
+ schema.AdditionalProperties = AdditionalProperties{Has: BoolPtr(false)}
+ return schema
+}
+
+func (schema *Schema) WithAdditionalProperties(v *Schema) *Schema {
+ schema.AdditionalProperties = AdditionalProperties{}
+ if v != nil {
+ schema.AdditionalProperties.Schema = &SchemaRef{Value: v}
+ }
+ return schema
+}
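+
+// exampleFluentSchema is an illustrative sketch, not part of the upstream
+// kin-openapi file: the With* builders above return the receiver, so a typical
+// object schema can be assembled in a single chained expression. The function
+// name is hypothetical.
+func exampleFluentSchema() *Schema {
+ return NewObjectSchema().
+ WithProperty("name", NewStringSchema().WithMinLength(1)).
+ WithProperty("age", NewInt64Schema().WithMin(0)).
+ WithRequired([]string{"name"}).
+ WithoutAdditionalProperties()
+}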
+
+func (schema *Schema) PermitsNull() bool {
+ return schema.Nullable || schema.Type.Includes("null")
+}
+
+// IsEmpty tells whether schema is equivalent to the empty schema `{}`.
+func (schema *Schema) IsEmpty() bool {
+ if schema.Type != nil || schema.Format != "" || len(schema.Enum) != 0 ||
+ schema.UniqueItems || schema.ExclusiveMin || schema.ExclusiveMax ||
+ schema.Nullable || schema.ReadOnly || schema.WriteOnly || schema.AllowEmptyValue ||
+ schema.Min != nil || schema.Max != nil || schema.MultipleOf != nil ||
+ schema.MinLength != 0 || schema.MaxLength != nil || schema.Pattern != "" ||
+ schema.MinItems != 0 || schema.MaxItems != nil ||
+ len(schema.Required) != 0 ||
+ schema.MinProps != 0 || schema.MaxProps != nil {
+ return false
+ }
+ if n := schema.Not; n != nil && n.Value != nil && !n.Value.IsEmpty() {
+ return false
+ }
+ if ap := schema.AdditionalProperties.Schema; ap != nil && ap.Value != nil && !ap.Value.IsEmpty() {
+ return false
+ }
+ if apa := schema.AdditionalProperties.Has; apa != nil && !*apa {
+ return false
+ }
+ if items := schema.Items; items != nil && items.Value != nil && !items.Value.IsEmpty() {
+ return false
+ }
+ for _, s := range schema.Properties {
+ if ss := s.Value; ss != nil && !ss.IsEmpty() {
+ return false
+ }
+ }
+ for _, s := range schema.OneOf {
+ if ss := s.Value; ss != nil && !ss.IsEmpty() {
+ return false
+ }
+ }
+ for _, s := range schema.AnyOf {
+ if ss := s.Value; ss != nil && !ss.IsEmpty() {
+ return false
+ }
+ }
+ for _, s := range schema.AllOf {
+ if ss := s.Value; ss != nil && !ss.IsEmpty() {
+ return false
+ }
+ }
+ return true
+}
+
+// Validate returns an error if Schema does not comply with the OpenAPI spec.
+func (schema *Schema) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+ _, err := schema.validate(ctx, []*Schema{})
+ return err
+}
+
+// returns the updated stack and an error if Schema does not comply with the OpenAPI spec.
+func (schema *Schema) validate(ctx context.Context, stack []*Schema) ([]*Schema, error) {
+ validationOpts := getValidationOptions(ctx)
+
+ for _, existing := range stack {
+ if existing == schema {
+ return stack, nil
+ }
+ }
+ stack = append(stack, schema)
+
+ if schema.ReadOnly && schema.WriteOnly {
+ return stack, errors.New("a property MUST NOT be marked as both readOnly and writeOnly being true")
+ }
+
+ for _, item := range schema.OneOf {
+ v := item.Value
+ if v == nil {
+ return stack, foundUnresolvedRef(item.Ref)
+ }
+
+ var err error
+ if stack, err = v.validate(ctx, stack); err != nil {
+ return stack, err
+ }
+ }
+
+ for _, item := range schema.AnyOf {
+ v := item.Value
+ if v == nil {
+ return stack, foundUnresolvedRef(item.Ref)
+ }
+
+ var err error
+ if stack, err = v.validate(ctx, stack); err != nil {
+ return stack, err
+ }
+ }
+
+ for _, item := range schema.AllOf {
+ v := item.Value
+ if v == nil {
+ return stack, foundUnresolvedRef(item.Ref)
+ }
+
+ var err error
+ if stack, err = v.validate(ctx, stack); err != nil {
+ return stack, err
+ }
+ }
+
+ if ref := schema.Not; ref != nil {
+ v := ref.Value
+ if v == nil {
+ return stack, foundUnresolvedRef(ref.Ref)
+ }
+
+ var err error
+ if stack, err = v.validate(ctx, stack); err != nil {
+ return stack, err
+ }
+ }
+
+ for _, schemaType := range schema.Type.Slice() {
+ switch schemaType {
+ case TypeBoolean:
+ case TypeNumber:
+ if format := schema.Format; len(format) > 0 {
+ switch format {
+ case "float", "double":
+ default:
+ if validationOpts.schemaFormatValidationEnabled {
+ return stack, unsupportedFormat(format)
+ }
+ }
+ }
+ case TypeInteger:
+ if format := schema.Format; len(format) > 0 {
+ switch format {
+ case "int32", "int64":
+ default:
+ if validationOpts.schemaFormatValidationEnabled {
+ return stack, unsupportedFormat(format)
+ }
+ }
+ }
+ case TypeString:
+ if format := schema.Format; len(format) > 0 {
+ switch format {
+ // Supported by OpenAPIv3.0.3:
+ // https://spec.openapis.org/oas/v3.0.3
+ case "byte", "binary", "date", "date-time", "password":
+ // In JSON Draft-07 (not validated yet though):
+ // https://json-schema.org/draft-07/json-schema-release-notes.html#formats
+ case "iri", "iri-reference", "uri-template", "idn-email", "idn-hostname":
+ case "json-pointer", "relative-json-pointer", "regex", "time":
+ // In JSON Draft 2019-09 (not validated yet though):
+ // https://json-schema.org/draft/2019-09/release-notes.html#format-vocabulary
+ case "duration", "uuid":
+ // Defined in some other specification
+ case "email", "hostname", "ipv4", "ipv6", "uri", "uri-reference":
+ default:
+ // Try to check for custom defined formats
+ if _, ok := SchemaStringFormats[format]; !ok && validationOpts.schemaFormatValidationEnabled {
+ return stack, unsupportedFormat(format)
+ }
+ }
+ }
+ if !validationOpts.schemaPatternValidationDisabled && schema.Pattern != "" {
+ if _, err := schema.compilePattern(); err != nil {
+ return stack, err
+ }
+ }
+ case TypeArray:
+ if schema.Items == nil {
+ return stack, errors.New("when schema type is 'array', schema 'items' must be non-null")
+ }
+ case TypeObject:
+ default:
+ return stack, fmt.Errorf("unsupported 'type' value %q", schemaType)
+ }
+ }
+
+ if ref := schema.Items; ref != nil {
+ v := ref.Value
+ if v == nil {
+ return stack, foundUnresolvedRef(ref.Ref)
+ }
+
+ var err error
+ if stack, err = v.validate(ctx, stack); err != nil {
+ return stack, err
+ }
+ }
+
+ properties := make([]string, 0, len(schema.Properties))
+ for name := range schema.Properties {
+ properties = append(properties, name)
+ }
+ sort.Strings(properties)
+ for _, name := range properties {
+ ref := schema.Properties[name]
+ v := ref.Value
+ if v == nil {
+ return stack, foundUnresolvedRef(ref.Ref)
+ }
+
+ var err error
+ if stack, err = v.validate(ctx, stack); err != nil {
+ return stack, err
+ }
+ }
+
+ if schema.AdditionalProperties.Has != nil && schema.AdditionalProperties.Schema != nil {
+ return stack, errors.New("additionalProperties are set to both boolean and schema")
+ }
+ if ref := schema.AdditionalProperties.Schema; ref != nil {
+ v := ref.Value
+ if v == nil {
+ return stack, foundUnresolvedRef(ref.Ref)
+ }
+
+ var err error
+ if stack, err = v.validate(ctx, stack); err != nil {
+ return stack, err
+ }
+ }
+
+ if v := schema.ExternalDocs; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return stack, fmt.Errorf("invalid external docs: %w", err)
+ }
+ }
+
+ if v := schema.Default; v != nil && !validationOpts.schemaDefaultsValidationDisabled {
+ if err := schema.VisitJSON(v); err != nil {
+ return stack, fmt.Errorf("invalid default: %w", err)
+ }
+ }
+
+ if x := schema.Example; x != nil && !validationOpts.examplesValidationDisabled {
+ if err := validateExampleValue(ctx, x, schema); err != nil {
+ return stack, fmt.Errorf("invalid example: %w", err)
+ }
+ }
+
+ return stack, validateExtensions(ctx, schema.Extensions)
+}
+
+func (schema *Schema) IsMatching(value interface{}) bool {
+ settings := newSchemaValidationSettings(FailFast())
+ return schema.visitJSON(settings, value) == nil
+}
+
+func (schema *Schema) IsMatchingJSONBoolean(value bool) bool {
+ settings := newSchemaValidationSettings(FailFast())
+ return schema.visitJSON(settings, value) == nil
+}
+
+func (schema *Schema) IsMatchingJSONNumber(value float64) bool {
+ settings := newSchemaValidationSettings(FailFast())
+ return schema.visitJSON(settings, value) == nil
+}
+
+func (schema *Schema) IsMatchingJSONString(value string) bool {
+ settings := newSchemaValidationSettings(FailFast())
+ return schema.visitJSON(settings, value) == nil
+}
+
+func (schema *Schema) IsMatchingJSONArray(value []interface{}) bool {
+ settings := newSchemaValidationSettings(FailFast())
+ return schema.visitJSON(settings, value) == nil
+}
+
+func (schema *Schema) IsMatchingJSONObject(value map[string]interface{}) bool {
+ settings := newSchemaValidationSettings(FailFast())
+ return schema.visitJSON(settings, value) == nil
+}
+
+func (schema *Schema) VisitJSON(value interface{}, opts ...SchemaValidationOption) error {
+ settings := newSchemaValidationSettings(opts...)
+ return schema.visitJSON(settings, value)
+}
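+
+// exampleVisitJSON is an illustrative sketch, not part of the upstream kin-openapi
+// file: it validates an already-decoded value against a schema. MultiErrors (a
+// SchemaValidationOption assumed to be defined in schema_validation_settings.go)
+// collects every violation instead of stopping at the first one. The function name
+// is hypothetical.
+func exampleVisitJSON() error {
+ schema := NewObjectSchema().
+ WithProperty("id", NewIntegerSchema()).
+ WithRequired([]string{"id"})
+ value := map[string]interface{}{"id": "not-an-integer"}
+ return schema.VisitJSON(value, MultiErrors()) // non-nil: "id" is not an integer
+}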
+
+func (schema *Schema) visitJSON(settings *schemaValidationSettings, value interface{}) (err error) {
+ switch value := value.(type) {
+ case nil:
+ // Don't use VisitJSONNull, as we still want to reach 'visitXOFOperations', since
+ // those could allow for a nullable value even though this one doesn't
+ if schema.PermitsNull() {
+ return
+ }
+ case float64:
+ if math.IsNaN(value) {
+ return ErrSchemaInputNaN
+ }
+ if math.IsInf(value, 0) {
+ return ErrSchemaInputInf
+ }
+ }
+
+ if schema.IsEmpty() {
+ switch value.(type) {
+ case nil:
+ return schema.visitJSONNull(settings)
+ default:
+ return
+ }
+ }
+
+ if err = schema.visitNotOperation(settings, value); err != nil {
+ return
+ }
+ var run bool
+ if err, run = schema.visitXOFOperations(settings, value); err != nil || !run {
+ return
+ }
+ if err = schema.visitEnumOperation(settings, value); err != nil {
+ return
+ }
+
+ switch value := value.(type) {
+ case nil:
+ return schema.visitJSONNull(settings)
+ case bool:
+ return schema.visitJSONBoolean(settings, value)
+ case json.Number:
+ valueFloat64, err := value.Float64()
+ if err != nil {
+ return &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "type",
+ Reason: "cannot convert json.Number to float64",
+ customizeMessageError: settings.customizeMessageError,
+ Origin: err,
+ }
+ }
+ return schema.visitJSONNumber(settings, valueFloat64)
+ case int:
+ return schema.visitJSONNumber(settings, float64(value))
+ case int32:
+ return schema.visitJSONNumber(settings, float64(value))
+ case int64:
+ return schema.visitJSONNumber(settings, float64(value))
+ case float64:
+ return schema.visitJSONNumber(settings, value)
+ case string:
+ return schema.visitJSONString(settings, value)
+ case []interface{}:
+ return schema.visitJSONArray(settings, value)
+ case map[string]interface{}:
+ return schema.visitJSONObject(settings, value)
+ case map[interface{}]interface{}: // for YAML cf. issue #444
+ values := make(map[string]interface{}, len(value))
+ for key, v := range value {
+ if k, ok := key.(string); ok {
+ values[k] = v
+ }
+ }
+ if len(value) == len(values) {
+ return schema.visitJSONObject(settings, values)
+ }
+ }
+
+ // Catch slice of non-empty interface type
+ if reflect.TypeOf(value).Kind() == reflect.Slice {
+ valueR := reflect.ValueOf(value)
+ newValue := make([]interface{}, 0, valueR.Len())
+ for i := 0; i < valueR.Len(); i++ {
+ newValue = append(newValue, valueR.Index(i).Interface())
+ }
+ return schema.visitJSONArray(settings, newValue)
+ }
+
+ return &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "type",
+ Reason: fmt.Sprintf("unhandled value of type %T", value),
+ customizeMessageError: settings.customizeMessageError,
+ }
+}
+
+func (schema *Schema) visitEnumOperation(settings *schemaValidationSettings, value interface{}) (err error) {
+ if enum := schema.Enum; len(enum) != 0 {
+ for _, v := range enum {
+ switch c := value.(type) {
+ case json.Number:
+ var f float64
+ if f, err = strconv.ParseFloat(c.String(), 64); err != nil {
+ return err
+ }
+ if v == f {
+ return
+ }
+ case int64:
+ if v == float64(c) {
+ return
+ }
+ default:
+ if reflect.DeepEqual(v, value) {
+ return
+ }
+ }
+ }
+ if settings.failfast {
+ return errSchema
+ }
+ allowedValues, _ := json.Marshal(enum)
+ return &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "enum",
+ Reason: fmt.Sprintf("value is not one of the allowed values %s", string(allowedValues)),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ }
+ return
+}
+
+func (schema *Schema) visitNotOperation(settings *schemaValidationSettings, value interface{}) (err error) {
+ if ref := schema.Not; ref != nil {
+ v := ref.Value
+ if v == nil {
+ return foundUnresolvedRef(ref.Ref)
+ }
+ if err := v.visitJSON(settings, value); err == nil {
+ if settings.failfast {
+ return errSchema
+ }
+ return &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "not",
+ customizeMessageError: settings.customizeMessageError,
+ }
+ }
+ }
+ return
+}
+
+// If the XOF operations pass successfully, further validation is skipped, as they
+// will already be satisfied (unless the schema itself is badly specified).
+func (schema *Schema) visitXOFOperations(settings *schemaValidationSettings, value interface{}) (err error, run bool) {
+ var visitedOneOf, visitedAnyOf, visitedAllOf bool
+ if v := schema.OneOf; len(v) > 0 {
+ var discriminatorRef string
+ if schema.Discriminator != nil {
+ pn := schema.Discriminator.PropertyName
+ if valuemap, okcheck := value.(map[string]interface{}); okcheck {
+ discriminatorVal, okcheck := valuemap[pn]
+ if !okcheck {
+ return &SchemaError{
+ Schema: schema,
+ SchemaField: "discriminator",
+ Reason: fmt.Sprintf("input does not contain the discriminator property %q", pn),
+ }, false
+ }
+
+ discriminatorValString, okcheck := discriminatorVal.(string)
+ if !okcheck {
+ return &SchemaError{
+ Value: discriminatorVal,
+ Schema: schema,
+ SchemaField: "discriminator",
+ Reason: fmt.Sprintf("value of discriminator property %q is not a string", pn),
+ }, false
+ }
+
+ if discriminatorRef, okcheck = schema.Discriminator.Mapping[discriminatorValString]; len(schema.Discriminator.Mapping) > 0 && !okcheck {
+ return &SchemaError{
+ Value: discriminatorVal,
+ Schema: schema,
+ SchemaField: "discriminator",
+ Reason: fmt.Sprintf("discriminator property %q has invalid value", pn),
+ }, false
+ }
+ }
+ }
+
+ var (
+ ok = 0
+ validationErrors = multiErrorForOneOf{}
+ matchedOneOfIndices = make([]int, 0)
+ tempValue = value
+ )
+ for idx, item := range v {
+ v := item.Value
+ if v == nil {
+ return foundUnresolvedRef(item.Ref), false
+ }
+
+ if discriminatorRef != "" && discriminatorRef != item.Ref {
+ continue
+ }
+
+ // make a deep copy so that defaults defined in a mismatched oneOf schema are not injected into the original value
+ if settings.asreq || settings.asrep {
+ tempValue = deepcopy.Copy(value)
+ }
+
+ if err := v.visitJSON(settings, tempValue); err != nil {
+ validationErrors = append(validationErrors, err)
+ continue
+ }
+
+ matchedOneOfIndices = append(matchedOneOfIndices, idx)
+ ok++
+ }
+
+ if ok != 1 {
+ if settings.failfast {
+ return errSchema, false
+ }
+ e := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "oneOf",
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if ok > 1 {
+ e.Origin = ErrOneOfConflict
+ e.Reason = fmt.Sprintf(`value matches more than one schema from "oneOf" (matches schemas at indices %v)`, matchedOneOfIndices)
+ } else {
+ e.Origin = fmt.Errorf("doesn't match schema due to: %w", validationErrors)
+ e.Reason = `value doesn't match any schema from "oneOf"`
+ }
+
+ return e, false
+ }
+
+ // run again to inject the default values defined in the matched oneOf schema
+ if settings.asreq || settings.asrep {
+ _ = v[matchedOneOfIndices[0]].Value.visitJSON(settings, value)
+ }
+ visitedOneOf = true
+ }
+
+ if v := schema.AnyOf; len(v) > 0 {
+ var (
+ ok = false
+ matchedAnyOfIdx = 0
+ tempValue = value
+ )
+ for idx, item := range v {
+ v := item.Value
+ if v == nil {
+ return foundUnresolvedRef(item.Ref), false
+ }
+ // make a deep copy so that defaults defined in a mismatched anyOf schema are not injected into the original value
+ if settings.asreq || settings.asrep {
+ tempValue = deepcopy.Copy(value)
+ }
+ if err := v.visitJSON(settings, tempValue); err == nil {
+ ok = true
+ matchedAnyOfIdx = idx
+ break
+ }
+ }
+ if !ok {
+ if settings.failfast {
+ return errSchema, false
+ }
+ return &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "anyOf",
+ Reason: `doesn't match any schema from "anyOf"`,
+ customizeMessageError: settings.customizeMessageError,
+ }, false
+ }
+
+ _ = v[matchedAnyOfIdx].Value.visitJSON(settings, value)
+ visitedAnyOf = true
+ }
+
+ for _, item := range schema.AllOf {
+ v := item.Value
+ if v == nil {
+ return foundUnresolvedRef(item.Ref), false
+ }
+ if err := v.visitJSON(settings, value); err != nil {
+ if settings.failfast {
+ return errSchema, false
+ }
+ return &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "allOf",
+ Reason: `doesn't match all schemas from "allOf"`,
+ Origin: err,
+ customizeMessageError: settings.customizeMessageError,
+ }, false
+ }
+ visitedAllOf = true
+ }
+
+ run = !((visitedOneOf || visitedAnyOf || visitedAllOf) && value == nil)
+ return
+}
+
+// The value is not considered in visitJSONNull because according to the spec
+// "null is not supported as a type" unless `nullable` is also set to true
+// https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#data-types
+// https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#schema-object
+func (schema *Schema) visitJSONNull(settings *schemaValidationSettings) (err error) {
+ if schema.PermitsNull() {
+ return
+ }
+ if settings.failfast {
+ return errSchema
+ }
+ return &SchemaError{
+ Value: nil,
+ Schema: schema,
+ SchemaField: "nullable",
+ Reason: "Value is not nullable",
+ customizeMessageError: settings.customizeMessageError,
+ }
+}
+
+func (schema *Schema) VisitJSONBoolean(value bool) error {
+ settings := newSchemaValidationSettings()
+ return schema.visitJSONBoolean(settings, value)
+}
+
+func (schema *Schema) visitJSONBoolean(settings *schemaValidationSettings, value bool) (err error) {
+ if !schema.Type.Permits(TypeBoolean) {
+ return schema.expectedType(settings, value)
+ }
+ return
+}
+
+func (schema *Schema) VisitJSONNumber(value float64) error {
+ settings := newSchemaValidationSettings()
+ return schema.visitJSONNumber(settings, value)
+}
+
+func (schema *Schema) visitJSONNumber(settings *schemaValidationSettings, value float64) error {
+ var me MultiError
+ schemaType := schema.Type
+ requireInteger := false
+ if schemaType.Permits(TypeInteger) && !schemaType.Permits(TypeNumber) {
+ requireInteger = true
+ if bigFloat := big.NewFloat(value); !bigFloat.IsInt() {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "type",
+ Reason: "value must be an integer",
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+ } else if !(schemaType.Permits(TypeInteger) || schemaType.Permits(TypeNumber)) {
+ return schema.expectedType(settings, value)
+ }
+
+ // formats
+ if requireInteger && schema.Format != "" {
+ formatMin := float64(0)
+ formatMax := float64(0)
+ switch schema.Format {
+ case "int32":
+ formatMin = formatMinInt32
+ formatMax = formatMaxInt32
+ case "int64":
+ formatMin = formatMinInt64
+ formatMax = formatMaxInt64
+ default:
+ if settings.formatValidationEnabled {
+ return unsupportedFormat(schema.Format)
+ }
+ }
+ if formatMin != 0 && formatMax != 0 && !(formatMin <= value && value <= formatMax) {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "format",
+ Reason: fmt.Sprintf("number must be an %s", schema.Format),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+ }
+
+ // "exclusiveMinimum"
+ if v := schema.ExclusiveMin; v && !(*schema.Min < value) {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "exclusiveMinimum",
+ Reason: fmt.Sprintf("number must be more than %g", *schema.Min),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+
+ // "exclusiveMaximum"
+ if v := schema.ExclusiveMax; v && !(*schema.Max > value) {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "exclusiveMaximum",
+ Reason: fmt.Sprintf("number must be less than %g", *schema.Max),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+
+ // "minimum"
+ if v := schema.Min; v != nil && !(*v <= value) {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "minimum",
+ Reason: fmt.Sprintf("number must be at least %g", *v),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+
+ // "maximum"
+ if v := schema.Max; v != nil && !(*v >= value) {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "maximum",
+ Reason: fmt.Sprintf("number must be at most %g", *v),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+
+ // "multipleOf"
+ if v := schema.MultipleOf; v != nil {
+ // "A numeric instance is valid only if division by this keyword's
+ // value results in an integer."
+ if bigFloat := big.NewFloat(value / *v); !bigFloat.IsInt() {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "multipleOf",
+ Reason: fmt.Sprintf("number must be a multiple of %g", *v),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+ }
+
+ if len(me) > 0 {
+ return me
+ }
+
+ return nil
+}
+
+func (schema *Schema) VisitJSONString(value string) error {
+ settings := newSchemaValidationSettings()
+ return schema.visitJSONString(settings, value)
+}
+
+func (schema *Schema) visitJSONString(settings *schemaValidationSettings, value string) error {
+ if !schema.Type.Permits(TypeString) {
+ return schema.expectedType(settings, value)
+ }
+
+ var me MultiError
+
+ // "minLength" and "maxLength"
+ minLength := schema.MinLength
+ maxLength := schema.MaxLength
+ if minLength != 0 || maxLength != nil {
+ // JSON schema string lengths are UTF-16, not UTF-8!
+ length := int64(0)
+ for _, r := range value {
+ if utf16.IsSurrogate(r) {
+ length += 2
+ } else {
+ length++
+ }
+ }
+ if minLength != 0 && length < int64(minLength) {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "minLength",
+ Reason: fmt.Sprintf("minimum string length is %d", minLength),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+ if maxLength != nil && length > int64(*maxLength) {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "maxLength",
+ Reason: fmt.Sprintf("maximum string length is %d", *maxLength),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+ }
+
+ // "pattern"
+ if !settings.patternValidationDisabled && schema.Pattern != "" {
+ cpiface, _ := compiledPatterns.Load(schema.Pattern)
+ cp, _ := cpiface.(*regexp.Regexp)
+ if cp == nil {
+ var err error
+ if cp, err = schema.compilePattern(); err != nil {
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+ }
+ if !cp.MatchString(value) {
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "pattern",
+ Reason: fmt.Sprintf(`string doesn't match the regular expression "%s"`, schema.Pattern),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+ }
+
+ // "format"
+ var formatStrErr string
+ var formatErr error
+ if format := schema.Format; format != "" {
+ if f, ok := SchemaStringFormats[format]; ok {
+ switch {
+ case f.regexp != nil && f.callback == nil:
+ if cp := f.regexp; !cp.MatchString(value) {
+ formatStrErr = fmt.Sprintf(`string doesn't match the format %q (regular expression "%s")`, format, cp.String())
+ }
+ case f.regexp == nil && f.callback != nil:
+ if err := f.callback(value); err != nil {
+ schemaErr := &SchemaError{}
+ if errors.As(err, &schemaErr) {
+ formatStrErr = fmt.Sprintf(`string doesn't match the format %q (%s)`, format, schemaErr.Reason)
+ } else {
+ formatStrErr = fmt.Sprintf(`string doesn't match the format %q (%v)`, format, err)
+ }
+ formatErr = err
+ }
+ default:
+ formatStrErr = fmt.Sprintf("corrupted entry %q in SchemaStringFormats", format)
+ }
+ }
+ }
+ if formatStrErr != "" || formatErr != nil {
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "format",
+ Reason: formatStrErr,
+ Origin: formatErr,
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+
+ }
+
+ if len(me) > 0 {
+ return me
+ }
+
+ return nil
+}
+
+func (schema *Schema) VisitJSONArray(value []interface{}) error {
+ settings := newSchemaValidationSettings()
+ return schema.visitJSONArray(settings, value)
+}
+
+func (schema *Schema) visitJSONArray(settings *schemaValidationSettings, value []interface{}) error {
+ if !schema.Type.Permits(TypeArray) {
+ return schema.expectedType(settings, value)
+ }
+
+ var me MultiError
+
+ lenValue := int64(len(value))
+
+ // "minItems"
+ if v := schema.MinItems; v != 0 && lenValue < int64(v) {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "minItems",
+ Reason: fmt.Sprintf("minimum number of items is %d", v),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+
+ // "maxItems"
+ if v := schema.MaxItems; v != nil && lenValue > int64(*v) {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "maxItems",
+ Reason: fmt.Sprintf("maximum number of items is %d", *v),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+
+ // "uniqueItems"
+ if sliceUniqueItemsChecker == nil {
+ sliceUniqueItemsChecker = isSliceOfUniqueItems
+ }
+ if v := schema.UniqueItems; v && !sliceUniqueItemsChecker(value) {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "uniqueItems",
+ Reason: "duplicate items found",
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+
+ // "items"
+ if itemSchemaRef := schema.Items; itemSchemaRef != nil {
+ itemSchema := itemSchemaRef.Value
+ if itemSchema == nil {
+ return foundUnresolvedRef(itemSchemaRef.Ref)
+ }
+ for i, item := range value {
+ if err := itemSchema.visitJSON(settings, item); err != nil {
+ err = markSchemaErrorIndex(err, i)
+ if !settings.multiError {
+ return err
+ }
+ if itemMe, ok := err.(MultiError); ok {
+ me = append(me, itemMe...)
+ } else {
+ me = append(me, err)
+ }
+ }
+ }
+ }
+
+ if len(me) > 0 {
+ return me
+ }
+
+ return nil
+}
+
+func (schema *Schema) VisitJSONObject(value map[string]interface{}) error {
+ settings := newSchemaValidationSettings()
+ return schema.visitJSONObject(settings, value)
+}
+
+func (schema *Schema) visitJSONObject(settings *schemaValidationSettings, value map[string]interface{}) error {
+ if !schema.Type.Permits(TypeObject) {
+ return schema.expectedType(settings, value)
+ }
+
+ var me MultiError
+
+ if settings.asreq || settings.asrep {
+ properties := make([]string, 0, len(schema.Properties))
+ for propName := range schema.Properties {
+ properties = append(properties, propName)
+ }
+ sort.Strings(properties)
+ for _, propName := range properties {
+ propSchema := schema.Properties[propName]
+ reqRO := settings.asreq && propSchema.Value.ReadOnly && !settings.readOnlyValidationDisabled
+ repWO := settings.asrep && propSchema.Value.WriteOnly && !settings.writeOnlyValidationDisabled
+
+ if f := settings.defaultsSet; f != nil && value[propName] == nil {
+ if dflt := propSchema.Value.Default; dflt != nil && !reqRO && !repWO {
+ value[propName] = dflt
+ settings.onceSettingDefaults.Do(f)
+ }
+ }
+
+ if value[propName] != nil {
+ if reqRO {
+ me = append(me, fmt.Errorf("readOnly property %q in request", propName))
+ } else if repWO {
+ me = append(me, fmt.Errorf("writeOnly property %q in response", propName))
+ }
+ }
+ }
+ }
+
+ // "properties"
+ properties := schema.Properties
+ lenValue := int64(len(value))
+
+ // "minProperties"
+ if v := schema.MinProps; v != 0 && lenValue < int64(v) {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "minProperties",
+ Reason: fmt.Sprintf("there must be at least %d properties", v),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+
+ // "maxProperties"
+ if v := schema.MaxProps; v != nil && lenValue > int64(*v) {
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "maxProperties",
+ Reason: fmt.Sprintf("there must be at most %d properties", *v),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+
+ // "additionalProperties"
+ var additionalProperties *Schema
+ if ref := schema.AdditionalProperties.Schema; ref != nil {
+ additionalProperties = ref.Value
+ }
+ keys := make([]string, 0, len(value))
+ for k := range value {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+ for _, k := range keys {
+ v := value[k]
+ if properties != nil {
+ propertyRef := properties[k]
+ if propertyRef != nil {
+ p := propertyRef.Value
+ if p == nil {
+ return foundUnresolvedRef(propertyRef.Ref)
+ }
+ if err := p.visitJSON(settings, v); err != nil {
+ if settings.failfast {
+ return errSchema
+ }
+ err = markSchemaErrorKey(err, k)
+ if !settings.multiError {
+ return err
+ }
+ if v, ok := err.(MultiError); ok {
+ me = append(me, v...)
+ continue
+ }
+ me = append(me, err)
+ }
+ continue
+ }
+ }
+ if allowed := schema.AdditionalProperties.Has; allowed == nil || *allowed {
+ if additionalProperties != nil {
+ if err := additionalProperties.visitJSON(settings, v); err != nil {
+ if settings.failfast {
+ return errSchema
+ }
+ err = markSchemaErrorKey(err, k)
+ if !settings.multiError {
+ return err
+ }
+ if v, ok := err.(MultiError); ok {
+ me = append(me, v...)
+ continue
+ }
+ me = append(me, err)
+ }
+ }
+ continue
+ }
+ if settings.failfast {
+ return errSchema
+ }
+ err := &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "properties",
+ Reason: fmt.Sprintf("property %q is unsupported", k),
+ customizeMessageError: settings.customizeMessageError,
+ }
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+
+ // "required"
+ for _, k := range schema.Required {
+ if _, ok := value[k]; !ok {
+ if s := schema.Properties[k]; s != nil && s.Value.ReadOnly && settings.asreq {
+ continue
+ }
+ if s := schema.Properties[k]; s != nil && s.Value.WriteOnly && settings.asrep {
+ continue
+ }
+ if settings.failfast {
+ return errSchema
+ }
+ err := markSchemaErrorKey(&SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "required",
+ Reason: fmt.Sprintf("property %q is missing", k),
+ customizeMessageError: settings.customizeMessageError,
+ }, k)
+ if !settings.multiError {
+ return err
+ }
+ me = append(me, err)
+ }
+ }
+
+ if len(me) > 0 {
+ return me
+ }
+
+ return nil
+}
+
+func (schema *Schema) expectedType(settings *schemaValidationSettings, value interface{}) error {
+ if settings.failfast {
+ return errSchema
+ }
+
+ a := "a"
+ var x string
+ schemaTypes := (*schema.Type)
+ if len(schemaTypes) == 1 {
+ x = schemaTypes[0]
+ switch x {
+ case TypeArray, TypeObject, TypeInteger:
+ a = "an"
+ }
+ } else {
+ a = "one of"
+ x = strings.Join(schemaTypes, ", ")
+ }
+ return &SchemaError{
+ Value: value,
+ Schema: schema,
+ SchemaField: "type",
+ Reason: fmt.Sprintf("value must be %s %s", a, x),
+ customizeMessageError: settings.customizeMessageError,
+ }
+}
+
+// SchemaError is an error that occurs during schema validation.
+type SchemaError struct {
+ // Value is the value that failed validation.
+ Value interface{}
+ // reversePath is the path to the value that failed validation.
+ reversePath []string
+ // Schema is the schema that failed validation.
+ Schema *Schema
+ // SchemaField is the field of the schema that failed validation.
+ SchemaField string
+ // Reason is a human-readable message describing the error.
+ // The message should never include the original value to prevent leakage of potentially sensitive inputs in error messages.
+ Reason string
+ // Origin is the original error that caused this error.
+ Origin error
+ // customizeMessageError is a function that can be used to customize the error message.
+ customizeMessageError func(err *SchemaError) string
+}
+
+var _ interface{ Unwrap() error } = SchemaError{}
+
+func markSchemaErrorKey(err error, key string) error {
+ var me multiErrorForOneOf
+
+ if errors.As(err, &me) {
+ err = me.Unwrap()
+ }
+
+ if v, ok := err.(*SchemaError); ok {
+ v.reversePath = append(v.reversePath, key)
+ return v
+ }
+ if v, ok := err.(MultiError); ok {
+ for _, e := range v {
+ _ = markSchemaErrorKey(e, key)
+ }
+ return v
+ }
+ return err
+}
+
+func markSchemaErrorIndex(err error, index int) error {
+ return markSchemaErrorKey(err, strconv.FormatInt(int64(index), 10))
+}
+
+func (err *SchemaError) JSONPointer() []string {
+ reversePath := err.reversePath
+ path := append([]string(nil), reversePath...)
+ for left, right := 0, len(path)-1; left < right; left, right = left+1, right-1 {
+ path[left], path[right] = path[right], path[left]
+ }
+ return path
+}
+
+func (err *SchemaError) Error() string {
+ if err.customizeMessageError != nil {
+ if msg := err.customizeMessageError(err); msg != "" {
+ return msg
+ }
+ }
+
+ buf := bytes.NewBuffer(make([]byte, 0, 256))
+
+ if len(err.reversePath) > 0 {
+ buf.WriteString(`Error at "`)
+ reversePath := err.reversePath
+ for i := len(reversePath) - 1; i >= 0; i-- {
+ buf.WriteByte('/')
+ buf.WriteString(reversePath[i])
+ }
+ buf.WriteString(`": `)
+ }
+
+ if err.Origin != nil {
+ buf.WriteString(err.Origin.Error())
+
+ return buf.String()
+ }
+
+ reason := err.Reason
+ if reason == "" {
+ buf.WriteString(`Doesn't match schema "`)
+ buf.WriteString(err.SchemaField)
+ buf.WriteString(`"`)
+ } else {
+ buf.WriteString(reason)
+ }
+
+ if !SchemaErrorDetailsDisabled {
+ buf.WriteString("\nSchema:\n ")
+ encoder := json.NewEncoder(buf)
+ encoder.SetIndent(" ", " ")
+ if err := encoder.Encode(err.Schema); err != nil {
+ panic(err)
+ }
+ buf.WriteString("\nValue:\n ")
+ if err := encoder.Encode(err.Value); err != nil {
+ panic(err)
+ }
+ }
+
+ return buf.String()
+}
+
+func (err SchemaError) Unwrap() error {
+ return err.Origin
+}
+
+func isSliceOfUniqueItems(xs []interface{}) bool {
+ s := len(xs)
+ m := make(map[string]struct{}, s)
+ for _, x := range xs {
+		// The input slice was decoded from a JSON string, so marshaling
+		// it back can never fail.
+ key, _ := json.Marshal(&x)
+ m[string(key)] = struct{}{}
+ }
+ return s == len(m)
+}
+
+// SliceUniqueItemsChecker is a function used to check whether a given slice
+// has unique items.
+type SliceUniqueItemsChecker func(items []interface{}) bool
+
+// By default, the predefined isSliceOfUniqueItems is used; it relies on
+// json.Marshal to generate map keys when checking whether a given slice
+// has unique items.
+var sliceUniqueItemsChecker SliceUniqueItemsChecker = isSliceOfUniqueItems
+
+// RegisterArrayUniqueItemsChecker registers a customized function
+// used to check whether a JSON array has unique items.
+func RegisterArrayUniqueItemsChecker(fn SliceUniqueItemsChecker) {
+ sliceUniqueItemsChecker = fn
+}
+
+func unsupportedFormat(format string) error {
+ return fmt.Errorf("unsupported 'format' value %q", format)
+}
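
A brief usage sketch for the uniqueItems hook above (not part of the vendored files; the checker body simply mirrors the default behaviour as a user-supplied callback):

package main

import (
	"encoding/json"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// Register a checker that compares items by their canonical JSON encoding.
	openapi3.RegisterArrayUniqueItemsChecker(func(items []interface{}) bool {
		seen := make(map[string]struct{}, len(items))
		for _, item := range items {
			key, _ := json.Marshal(item) // items come from decoded JSON, so this cannot fail
			if _, dup := seen[string(key)]; dup {
				return false
			}
			seen[string(key)] = struct{}{}
		}
		return true
	})
}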
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/schema_formats.go b/vendor/github.com/getkin/kin-openapi/openapi3/schema_formats.go
new file mode 100644
index 00000000..ea38400c
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/schema_formats.go
@@ -0,0 +1,106 @@
+package openapi3
+
+import (
+ "fmt"
+ "net"
+ "regexp"
+ "strings"
+)
+
+const (
+ // FormatOfStringForUUIDOfRFC4122 is an optional predefined format for UUID v1-v5 as specified by RFC4122
+ FormatOfStringForUUIDOfRFC4122 = `^(?:[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000)$`
+
+ // FormatOfStringForEmail pattern catches only some suspiciously wrong-looking email addresses.
+ // Use DefineStringFormat(...) if you need something stricter.
+ FormatOfStringForEmail = `^[^@]+@[^@<>",\s]+$`
+)
+
+// FormatCallback performs custom checks on exotic formats
+type FormatCallback func(value string) error
+
+// Format represents a format validator registered by either DefineStringFormat or DefineStringFormatCallback
+type Format struct {
+ regexp *regexp.Regexp
+ callback FormatCallback
+}
+
+// SchemaStringFormats allows for validating string formats
+var SchemaStringFormats = make(map[string]Format, 4)
+
+// DefineStringFormat defines a new regexp pattern for a given format
+func DefineStringFormat(name string, pattern string) {
+ re, err := regexp.Compile(pattern)
+ if err != nil {
+ err := fmt.Errorf("format %q has invalid pattern %q: %w", name, pattern, err)
+ panic(err)
+ }
+ SchemaStringFormats[name] = Format{regexp: re}
+}
+
+// DefineStringFormatCallback adds a validation function for a specific schema format entry
+func DefineStringFormatCallback(name string, callback FormatCallback) {
+ SchemaStringFormats[name] = Format{callback: callback}
+}
+
+func validateIP(ip string) error {
+ parsed := net.ParseIP(ip)
+ if parsed == nil {
+ return &SchemaError{
+ Value: ip,
+ Reason: "Not an IP address",
+ }
+ }
+ return nil
+}
+
+func validateIPv4(ip string) error {
+ if err := validateIP(ip); err != nil {
+ return err
+ }
+
+ if !(strings.Count(ip, ":") < 2) {
+ return &SchemaError{
+ Value: ip,
+ Reason: "Not an IPv4 address (it's IPv6)",
+ }
+ }
+ return nil
+}
+
+func validateIPv6(ip string) error {
+ if err := validateIP(ip); err != nil {
+ return err
+ }
+
+ if !(strings.Count(ip, ":") >= 2) {
+ return &SchemaError{
+ Value: ip,
+ Reason: "Not an IPv6 address (it's IPv4)",
+ }
+ }
+ return nil
+}
+
+func init() {
+ // Base64
+	// The pattern supports base64 and base64url. Padding ('=') is supported.
+ DefineStringFormat("byte", `(^$|^[a-zA-Z0-9+/\-_]*=*$)`)
+
+ // date
+ DefineStringFormat("date", `^[0-9]{4}-(0[0-9]|10|11|12)-([0-2][0-9]|30|31)$`)
+
+ // date-time
+ DefineStringFormat("date-time", `^[0-9]{4}-(0[0-9]|10|11|12)-([0-2][0-9]|30|31)T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?(Z|(\+|-)[0-9]{2}:[0-9]{2})?$`)
+
+}
+
+// DefineIPv4Format opts in ipv4 format validation on top of OAS 3 spec
+func DefineIPv4Format() {
+ DefineStringFormatCallback("ipv4", validateIPv4)
+}
+
+// DefineIPv6Format opts in ipv6 format validation on top of OAS 3 spec
+func DefineIPv6Format() {
+ DefineStringFormatCallback("ipv6", validateIPv6)
+}
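
A usage sketch for the format hooks defined above (illustrative only; the format names "employee-id" and "no-spaces" are assumptions, not part of the patch):

package main

import (
	"errors"
	"strings"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// Opt in to the extra ipv4 check on top of the OAS 3 spec.
	openapi3.DefineIPv4Format()

	// Pattern-based format: values must look like E-1234 (illustrative).
	openapi3.DefineStringFormat("employee-id", `^E-[0-9]{4}$`)

	// Callback-based format: reject values containing whitespace (illustrative).
	openapi3.DefineStringFormatCallback("no-spaces", func(value string) error {
		if strings.ContainsAny(value, " \t") {
			return errors.New("value must not contain whitespace")
		}
		return nil
	})
}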
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/schema_pattern.go b/vendor/github.com/getkin/kin-openapi/openapi3/schema_pattern.go
new file mode 100644
index 00000000..4794b6a0
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/schema_pattern.go
@@ -0,0 +1,29 @@
+package openapi3
+
+import (
+ "fmt"
+ "regexp"
+)
+
+var patRewriteCodepoints = regexp.MustCompile(`(?P<replaced>\\u)(?P<code>[0-9A-F]{4})`)
+
+// See https://pkg.go.dev/regexp/syntax
+func intoGoRegexp(re string) string {
+ return patRewriteCodepoints.ReplaceAllString(re, `\x{${code}}`)
+}
+
+// NOTE: racey WRT [writes to schema.Pattern] vs [reads schema.Pattern then writes to compiledPatterns]
+func (schema *Schema) compilePattern() (cp *regexp.Regexp, err error) {
+ pattern := schema.Pattern
+ if cp, err = regexp.Compile(intoGoRegexp(pattern)); err != nil {
+ err = &SchemaError{
+ Schema: schema,
+ SchemaField: "pattern",
+ Origin: err,
+ Reason: fmt.Sprintf("cannot compile pattern %q: %v", pattern, err),
+ }
+ return
+ }
+ var _ bool = compiledPatterns.CompareAndSwap(pattern, nil, cp)
+ return
+}
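
For illustration, intoGoRegexp above rewrites JSON-Schema-style \uXXXX escapes into Go's \x{XXXX} form before compiling. A standalone sketch of the same substitution (not part of the vendored files):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same rewrite as intoGoRegexp: \u00E9 becomes \x{00E9} so Go's regexp engine accepts it.
	codepoints := regexp.MustCompile(`(?P<replaced>\\u)(?P<code>[0-9A-F]{4})`)
	pattern := codepoints.ReplaceAllString(`^\u00E9+$`, `\x{${code}}`)
	fmt.Println(pattern) // ^\x{00E9}+$

	re := regexp.MustCompile(pattern)
	fmt.Println(re.MatchString("ééé")) // true
}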
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/schema_validation_settings.go b/vendor/github.com/getkin/kin-openapi/openapi3/schema_validation_settings.go
new file mode 100644
index 00000000..17aad2fa
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/schema_validation_settings.go
@@ -0,0 +1,79 @@
+package openapi3
+
+import (
+ "sync"
+)
+
+// SchemaValidationOption describes options a user has when validating request / response bodies.
+type SchemaValidationOption func(*schemaValidationSettings)
+
+type schemaValidationSettings struct {
+ failfast bool
+ multiError bool
+ asreq, asrep bool // exclusive (XOR) fields
+ formatValidationEnabled bool
+ patternValidationDisabled bool
+ readOnlyValidationDisabled bool
+ writeOnlyValidationDisabled bool
+
+ onceSettingDefaults sync.Once
+ defaultsSet func()
+
+ customizeMessageError func(err *SchemaError) string
+}
+
+// FailFast returns schema validation errors quicker.
+func FailFast() SchemaValidationOption {
+ return func(s *schemaValidationSettings) { s.failfast = true }
+}
+
+func MultiErrors() SchemaValidationOption {
+ return func(s *schemaValidationSettings) { s.multiError = true }
+}
+
+func VisitAsRequest() SchemaValidationOption {
+ return func(s *schemaValidationSettings) { s.asreq, s.asrep = true, false }
+}
+
+func VisitAsResponse() SchemaValidationOption {
+ return func(s *schemaValidationSettings) { s.asreq, s.asrep = false, true }
+}
+
+// EnableFormatValidation makes schema validation check string values against the formats registered in SchemaStringFormats; by default these format checks are skipped.
+func EnableFormatValidation() SchemaValidationOption {
+ return func(s *schemaValidationSettings) { s.formatValidationEnabled = true }
+}
+
+// DisablePatternValidation setting makes Validate not return an error when validating patterns that are not supported by the Go regexp engine.
+func DisablePatternValidation() SchemaValidationOption {
+ return func(s *schemaValidationSettings) { s.patternValidationDisabled = true }
+}
+
+// DisableReadOnlyValidation setting makes Validate not return an error when validating properties marked as read-only
+func DisableReadOnlyValidation() SchemaValidationOption {
+ return func(s *schemaValidationSettings) { s.readOnlyValidationDisabled = true }
+}
+
+// DisableWriteOnlyValidation setting makes Validate not return an error when validating properties marked as write-only
+func DisableWriteOnlyValidation() SchemaValidationOption {
+ return func(s *schemaValidationSettings) { s.writeOnlyValidationDisabled = true }
+}
+
+// DefaultsSet executes the given callback (once) IFF schema validation set default values.
+func DefaultsSet(f func()) SchemaValidationOption {
+ return func(s *schemaValidationSettings) { s.defaultsSet = f }
+}
+
+// SetSchemaErrorMessageCustomizer allows overriding the schema error message.
+// If the passed function returns an empty string, the default Error() implementation is used.
+func SetSchemaErrorMessageCustomizer(f func(err *SchemaError) string) SchemaValidationOption {
+ return func(s *schemaValidationSettings) { s.customizeMessageError = f }
+}
+
+func newSchemaValidationSettings(opts ...SchemaValidationOption) *schemaValidationSettings {
+ settings := &schemaValidationSettings{}
+ for _, opt := range opts {
+ opt(settings)
+ }
+ return settings
+}
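
A sketch of how these options are typically passed to value validation; it assumes Schema.VisitJSON and the NewObjectSchema/NewStringSchema helpers defined earlier in the vendored schema.go (not part of this file):

package main

import (
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	schema := openapi3.NewObjectSchema().
		WithProperty("name", openapi3.NewStringSchema())
	schema.Required = []string{"name"}

	// Collect every violation instead of stopping at the first one,
	// and validate the value as a request body.
	err := schema.VisitJSON(map[string]interface{}{},
		openapi3.MultiErrors(),
		openapi3.VisitAsRequest(),
	)
	fmt.Println(err) // reports the missing "name" property
}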
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/security_requirements.go b/vendor/github.com/getkin/kin-openapi/openapi3/security_requirements.go
new file mode 100644
index 00000000..87891c95
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/security_requirements.go
@@ -0,0 +1,51 @@
+package openapi3
+
+import (
+ "context"
+)
+
+type SecurityRequirements []SecurityRequirement
+
+func NewSecurityRequirements() *SecurityRequirements {
+ return &SecurityRequirements{}
+}
+
+func (srs *SecurityRequirements) With(securityRequirement SecurityRequirement) *SecurityRequirements {
+ *srs = append(*srs, securityRequirement)
+ return srs
+}
+
+// Validate returns an error if SecurityRequirements does not comply with the OpenAPI spec.
+func (srs SecurityRequirements) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ for _, security := range srs {
+ if err := security.Validate(ctx); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// SecurityRequirement is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#security-requirement-object
+type SecurityRequirement map[string][]string
+
+func NewSecurityRequirement() SecurityRequirement {
+ return make(SecurityRequirement)
+}
+
+func (security SecurityRequirement) Authenticate(provider string, scopes ...string) SecurityRequirement {
+ if len(scopes) == 0 {
+ scopes = []string{} // Forces the variable to be encoded as an array instead of null
+ }
+ security[provider] = scopes
+ return security
+}
+
+// Validate returns an error if SecurityRequirement does not comply with the OpenAPI spec.
+func (security *SecurityRequirement) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ return nil
+}
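
A sketch of assembling requirements with the builders above; the scheme names and scopes stand in for keys under components.securitySchemes and are illustrative:

package main

import (
	"context"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// Either an API key alone, or OAuth2 with two scopes, satisfies the operation.
	reqs := openapi3.NewSecurityRequirements().
		With(openapi3.NewSecurityRequirement().Authenticate("api_key")).
		With(openapi3.NewSecurityRequirement().Authenticate("oauth2", "read:teams", "write:teams"))

	fmt.Println(reqs.Validate(context.Background())) // <nil>
}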
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/security_scheme.go b/vendor/github.com/getkin/kin-openapi/openapi3/security_scheme.go
new file mode 100644
index 00000000..c07bfb61
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/security_scheme.go
@@ -0,0 +1,402 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/url"
+)
+
+// SecurityScheme is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#security-scheme-object
+type SecurityScheme struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Type string `json:"type,omitempty" yaml:"type,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Name string `json:"name,omitempty" yaml:"name,omitempty"`
+ In string `json:"in,omitempty" yaml:"in,omitempty"`
+ Scheme string `json:"scheme,omitempty" yaml:"scheme,omitempty"`
+ BearerFormat string `json:"bearerFormat,omitempty" yaml:"bearerFormat,omitempty"`
+ Flows *OAuthFlows `json:"flows,omitempty" yaml:"flows,omitempty"`
+ OpenIdConnectUrl string `json:"openIdConnectUrl,omitempty" yaml:"openIdConnectUrl,omitempty"`
+}
+
+func NewSecurityScheme() *SecurityScheme {
+ return &SecurityScheme{}
+}
+
+func NewCSRFSecurityScheme() *SecurityScheme {
+ return &SecurityScheme{
+ Type: "apiKey",
+ In: "header",
+ Name: "X-XSRF-TOKEN",
+ }
+}
+
+func NewOIDCSecurityScheme(oidcUrl string) *SecurityScheme {
+ return &SecurityScheme{
+ Type: "openIdConnect",
+ OpenIdConnectUrl: oidcUrl,
+ }
+}
+
+func NewJWTSecurityScheme() *SecurityScheme {
+ return &SecurityScheme{
+ Type: "http",
+ Scheme: "bearer",
+ BearerFormat: "JWT",
+ }
+}
+
+// MarshalJSON returns the JSON encoding of SecurityScheme.
+func (ss SecurityScheme) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 8+len(ss.Extensions))
+ for k, v := range ss.Extensions {
+ m[k] = v
+ }
+ if x := ss.Type; x != "" {
+ m["type"] = x
+ }
+ if x := ss.Description; x != "" {
+ m["description"] = x
+ }
+ if x := ss.Name; x != "" {
+ m["name"] = x
+ }
+ if x := ss.In; x != "" {
+ m["in"] = x
+ }
+ if x := ss.Scheme; x != "" {
+ m["scheme"] = x
+ }
+ if x := ss.BearerFormat; x != "" {
+ m["bearerFormat"] = x
+ }
+ if x := ss.Flows; x != nil {
+ m["flows"] = x
+ }
+ if x := ss.OpenIdConnectUrl; x != "" {
+ m["openIdConnectUrl"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets SecurityScheme to a copy of data.
+func (ss *SecurityScheme) UnmarshalJSON(data []byte) error {
+ type SecuritySchemeBis SecurityScheme
+ var x SecuritySchemeBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "type")
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "name")
+ delete(x.Extensions, "in")
+ delete(x.Extensions, "scheme")
+ delete(x.Extensions, "bearerFormat")
+ delete(x.Extensions, "flows")
+ delete(x.Extensions, "openIdConnectUrl")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *ss = SecurityScheme(x)
+ return nil
+}
+
+func (ss *SecurityScheme) WithType(value string) *SecurityScheme {
+ ss.Type = value
+ return ss
+}
+
+func (ss *SecurityScheme) WithDescription(value string) *SecurityScheme {
+ ss.Description = value
+ return ss
+}
+
+func (ss *SecurityScheme) WithName(value string) *SecurityScheme {
+ ss.Name = value
+ return ss
+}
+
+func (ss *SecurityScheme) WithIn(value string) *SecurityScheme {
+ ss.In = value
+ return ss
+}
+
+func (ss *SecurityScheme) WithScheme(value string) *SecurityScheme {
+ ss.Scheme = value
+ return ss
+}
+
+func (ss *SecurityScheme) WithBearerFormat(value string) *SecurityScheme {
+ ss.BearerFormat = value
+ return ss
+}
+
+// Validate returns an error if SecurityScheme does not comply with the OpenAPI spec.
+func (ss *SecurityScheme) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ hasIn := false
+ hasBearerFormat := false
+ hasFlow := false
+ switch ss.Type {
+ case "apiKey":
+ hasIn = true
+ case "http":
+ scheme := ss.Scheme
+ switch scheme {
+ case "bearer":
+ hasBearerFormat = true
+ case "basic", "negotiate", "digest":
+ default:
+ return fmt.Errorf("security scheme of type 'http' has invalid 'scheme' value %q", scheme)
+ }
+ case "oauth2":
+ hasFlow = true
+ case "openIdConnect":
+ if ss.OpenIdConnectUrl == "" {
+ return fmt.Errorf("no OIDC URL found for openIdConnect security scheme %q", ss.Name)
+ }
+ default:
+ return fmt.Errorf("security scheme 'type' can't be %q", ss.Type)
+ }
+
+ // Validate "in" and "name"
+ if hasIn {
+ switch ss.In {
+ case "query", "header", "cookie":
+ default:
+ return fmt.Errorf("security scheme of type 'apiKey' should have 'in'. It can be 'query', 'header' or 'cookie', not %q", ss.In)
+ }
+ if ss.Name == "" {
+ return errors.New("security scheme of type 'apiKey' should have 'name'")
+ }
+ } else if len(ss.In) > 0 {
+ return fmt.Errorf("security scheme of type %q can't have 'in'", ss.Type)
+ } else if len(ss.Name) > 0 {
+ return fmt.Errorf("security scheme of type %q can't have 'name'", ss.Type)
+ }
+
+ // Validate "format"
+ // "bearerFormat" is an arbitrary string so we only check if the scheme supports it
+ if !hasBearerFormat && len(ss.BearerFormat) > 0 {
+ return fmt.Errorf("security scheme of type %q can't have 'bearerFormat'", ss.Type)
+ }
+
+ // Validate "flow"
+ if hasFlow {
+ flow := ss.Flows
+ if flow == nil {
+ return fmt.Errorf("security scheme of type %q should have 'flows'", ss.Type)
+ }
+ if err := flow.Validate(ctx); err != nil {
+ return fmt.Errorf("security scheme 'flow' is invalid: %w", err)
+ }
+ } else if ss.Flows != nil {
+ return fmt.Errorf("security scheme of type %q can't have 'flows'", ss.Type)
+ }
+
+ return validateExtensions(ctx, ss.Extensions)
+}
+
+// OAuthFlows is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#oauth-flows-object
+type OAuthFlows struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Implicit *OAuthFlow `json:"implicit,omitempty" yaml:"implicit,omitempty"`
+ Password *OAuthFlow `json:"password,omitempty" yaml:"password,omitempty"`
+ ClientCredentials *OAuthFlow `json:"clientCredentials,omitempty" yaml:"clientCredentials,omitempty"`
+ AuthorizationCode *OAuthFlow `json:"authorizationCode,omitempty" yaml:"authorizationCode,omitempty"`
+}
+
+type oAuthFlowType int
+
+const (
+ oAuthFlowTypeImplicit oAuthFlowType = iota
+ oAuthFlowTypePassword
+ oAuthFlowTypeClientCredentials
+ oAuthFlowAuthorizationCode
+)
+
+// MarshalJSON returns the JSON encoding of OAuthFlows.
+func (flows OAuthFlows) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 4+len(flows.Extensions))
+ for k, v := range flows.Extensions {
+ m[k] = v
+ }
+ if x := flows.Implicit; x != nil {
+ m["implicit"] = x
+ }
+ if x := flows.Password; x != nil {
+ m["password"] = x
+ }
+ if x := flows.ClientCredentials; x != nil {
+ m["clientCredentials"] = x
+ }
+ if x := flows.AuthorizationCode; x != nil {
+ m["authorizationCode"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets OAuthFlows to a copy of data.
+func (flows *OAuthFlows) UnmarshalJSON(data []byte) error {
+ type OAuthFlowsBis OAuthFlows
+ var x OAuthFlowsBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "implicit")
+ delete(x.Extensions, "password")
+ delete(x.Extensions, "clientCredentials")
+ delete(x.Extensions, "authorizationCode")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *flows = OAuthFlows(x)
+ return nil
+}
+
+// Validate returns an error if OAuthFlows does not comply with the OpenAPI spec.
+func (flows *OAuthFlows) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if v := flows.Implicit; v != nil {
+ if err := v.validate(ctx, oAuthFlowTypeImplicit, opts...); err != nil {
+ return fmt.Errorf("the OAuth flow 'implicit' is invalid: %w", err)
+ }
+ }
+
+ if v := flows.Password; v != nil {
+ if err := v.validate(ctx, oAuthFlowTypePassword, opts...); err != nil {
+ return fmt.Errorf("the OAuth flow 'password' is invalid: %w", err)
+ }
+ }
+
+ if v := flows.ClientCredentials; v != nil {
+ if err := v.validate(ctx, oAuthFlowTypeClientCredentials, opts...); err != nil {
+ return fmt.Errorf("the OAuth flow 'clientCredentials' is invalid: %w", err)
+ }
+ }
+
+ if v := flows.AuthorizationCode; v != nil {
+ if err := v.validate(ctx, oAuthFlowAuthorizationCode, opts...); err != nil {
+ return fmt.Errorf("the OAuth flow 'authorizationCode' is invalid: %w", err)
+ }
+ }
+
+ return validateExtensions(ctx, flows.Extensions)
+}
+
+// OAuthFlow is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#oauth-flow-object
+type OAuthFlow struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ AuthorizationURL string `json:"authorizationUrl,omitempty" yaml:"authorizationUrl,omitempty"`
+ TokenURL string `json:"tokenUrl,omitempty" yaml:"tokenUrl,omitempty"`
+ RefreshURL string `json:"refreshUrl,omitempty" yaml:"refreshUrl,omitempty"`
+ Scopes map[string]string `json:"scopes" yaml:"scopes"` // required
+}
+
+// MarshalJSON returns the JSON encoding of OAuthFlow.
+func (flow OAuthFlow) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 4+len(flow.Extensions))
+ for k, v := range flow.Extensions {
+ m[k] = v
+ }
+ if x := flow.AuthorizationURL; x != "" {
+ m["authorizationUrl"] = x
+ }
+ if x := flow.TokenURL; x != "" {
+ m["tokenUrl"] = x
+ }
+ if x := flow.RefreshURL; x != "" {
+ m["refreshUrl"] = x
+ }
+ m["scopes"] = flow.Scopes
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets OAuthFlow to a copy of data.
+func (flow *OAuthFlow) UnmarshalJSON(data []byte) error {
+ type OAuthFlowBis OAuthFlow
+ var x OAuthFlowBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "authorizationUrl")
+ delete(x.Extensions, "tokenUrl")
+ delete(x.Extensions, "refreshUrl")
+ delete(x.Extensions, "scopes")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *flow = OAuthFlow(x)
+ return nil
+}
+
+// Validate returns an error if OAuthFlows does not comply with the OpenAPI spec.
+func (flow *OAuthFlow) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if v := flow.RefreshURL; v != "" {
+ if _, err := url.Parse(v); err != nil {
+ return fmt.Errorf("field 'refreshUrl' is invalid: %w", err)
+ }
+ }
+
+ if flow.Scopes == nil {
+ return errors.New("field 'scopes' is missing")
+ }
+
+ return validateExtensions(ctx, flow.Extensions)
+}
+
+func (flow *OAuthFlow) validate(ctx context.Context, typ oAuthFlowType, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ typeIn := func(types ...oAuthFlowType) bool {
+ for _, ty := range types {
+ if ty == typ {
+ return true
+ }
+ }
+ return false
+ }
+
+ if in := typeIn(oAuthFlowTypeImplicit, oAuthFlowAuthorizationCode); true {
+ switch {
+ case flow.AuthorizationURL == "" && in:
+ return errors.New("field 'authorizationUrl' is empty or missing")
+ case flow.AuthorizationURL != "" && !in:
+ return errors.New("field 'authorizationUrl' should not be set")
+ case flow.AuthorizationURL != "":
+ if _, err := url.Parse(flow.AuthorizationURL); err != nil {
+ return fmt.Errorf("field 'authorizationUrl' is invalid: %w", err)
+ }
+ }
+ }
+
+ if in := typeIn(oAuthFlowTypePassword, oAuthFlowTypeClientCredentials, oAuthFlowAuthorizationCode); true {
+ switch {
+ case flow.TokenURL == "" && in:
+ return errors.New("field 'tokenUrl' is empty or missing")
+ case flow.TokenURL != "" && !in:
+ return errors.New("field 'tokenUrl' should not be set")
+ case flow.TokenURL != "":
+ if _, err := url.Parse(flow.TokenURL); err != nil {
+ return fmt.Errorf("field 'tokenUrl' is invalid: %w", err)
+ }
+ }
+ }
+
+ return flow.Validate(ctx, opts...)
+}
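
A sketch of constructing and validating security schemes with the helpers above (illustrative, not part of the vendored files):

package main

import (
	"context"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// An http/bearer scheme carrying JWTs, as produced by NewJWTSecurityScheme.
	jwt := openapi3.NewJWTSecurityScheme().WithDescription("Bearer token issued by the login service")
	fmt.Println(jwt.Validate(context.Background())) // <nil>

	// An oauth2 scheme must carry flows; validation rejects this one.
	broken := openapi3.NewSecurityScheme().WithType("oauth2")
	fmt.Println(broken.Validate(context.Background())) // security scheme of type "oauth2" should have 'flows'
}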
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/serialization_method.go b/vendor/github.com/getkin/kin-openapi/openapi3/serialization_method.go
new file mode 100644
index 00000000..2ec8bd2d
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/serialization_method.go
@@ -0,0 +1,17 @@
+package openapi3
+
+const (
+ SerializationSimple = "simple"
+ SerializationLabel = "label"
+ SerializationMatrix = "matrix"
+ SerializationForm = "form"
+ SerializationSpaceDelimited = "spaceDelimited"
+ SerializationPipeDelimited = "pipeDelimited"
+ SerializationDeepObject = "deepObject"
+)
+
+// SerializationMethod describes a serialization method of HTTP request's parameters and body.
+type SerializationMethod struct {
+ Style string
+ Explode bool
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/server.go b/vendor/github.com/getkin/kin-openapi/openapi3/server.go
new file mode 100644
index 00000000..04e233d5
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/server.go
@@ -0,0 +1,284 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "math"
+ "net/url"
+ "sort"
+ "strings"
+)
+
+// Servers is specified by OpenAPI/Swagger standard version 3.
+type Servers []*Server
+
+// Validate returns an error if Servers does not comply with the OpenAPI spec.
+func (servers Servers) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ for _, v := range servers {
+ if err := v.Validate(ctx); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// BasePath returns the base path of the first server in the list, or /.
+func (servers Servers) BasePath() (string, error) {
+ for _, server := range servers {
+ return server.BasePath()
+ }
+ return "/", nil
+}
+
+func (servers Servers) MatchURL(parsedURL *url.URL) (*Server, []string, string) {
+ rawURL := parsedURL.String()
+ if i := strings.IndexByte(rawURL, '?'); i >= 0 {
+ rawURL = rawURL[:i]
+ }
+ for _, server := range servers {
+ pathParams, remaining, ok := server.MatchRawURL(rawURL)
+ if ok {
+ return server, pathParams, remaining
+ }
+ }
+ return nil, nil, ""
+}
+
+// Server is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#server-object
+type Server struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ URL string `json:"url" yaml:"url"` // Required
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Variables map[string]*ServerVariable `json:"variables,omitempty" yaml:"variables,omitempty"`
+}
+
+// BasePath returns the base path extracted from the default values of variables, if any.
+// Assumes a valid struct (per Validate()).
+func (server *Server) BasePath() (string, error) {
+ if server == nil {
+ return "/", nil
+ }
+
+ uri := server.URL
+ for name, svar := range server.Variables {
+ uri = strings.ReplaceAll(uri, "{"+name+"}", svar.Default)
+ }
+
+ u, err := url.ParseRequestURI(uri)
+ if err != nil {
+ return "", err
+ }
+
+ if bp := u.Path; bp != "" {
+ return bp, nil
+ }
+
+ return "/", nil
+}
+
+// MarshalJSON returns the JSON encoding of Server.
+func (server Server) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 3+len(server.Extensions))
+ for k, v := range server.Extensions {
+ m[k] = v
+ }
+ m["url"] = server.URL
+ if x := server.Description; x != "" {
+ m["description"] = x
+ }
+ if x := server.Variables; len(x) != 0 {
+ m["variables"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Server to a copy of data.
+func (server *Server) UnmarshalJSON(data []byte) error {
+ type ServerBis Server
+ var x ServerBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "url")
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "variables")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *server = Server(x)
+ return nil
+}
+
+func (server Server) ParameterNames() ([]string, error) {
+ pattern := server.URL
+ var params []string
+ for len(pattern) > 0 {
+ i := strings.IndexByte(pattern, '{')
+ if i < 0 {
+ break
+ }
+ pattern = pattern[i+1:]
+ i = strings.IndexByte(pattern, '}')
+ if i < 0 {
+ return nil, errors.New("missing '}'")
+ }
+ params = append(params, strings.TrimSpace(pattern[:i]))
+ pattern = pattern[i+1:]
+ }
+ return params, nil
+}
+
+func (server Server) MatchRawURL(input string) ([]string, string, bool) {
+ pattern := server.URL
+ var params []string
+ for len(pattern) > 0 {
+ c := pattern[0]
+ if len(pattern) == 1 && c == '/' {
+ break
+ }
+ if c == '{' {
+ // Find end of pattern
+ i := strings.IndexByte(pattern, '}')
+ if i < 0 {
+ return nil, "", false
+ }
+ pattern = pattern[i+1:]
+
+ // Find next matching pattern character or next '/' whichever comes first
+ np := -1
+ if len(pattern) > 0 {
+ np = strings.IndexByte(input, pattern[0])
+ }
+ ns := strings.IndexByte(input, '/')
+
+ if np < 0 {
+ i = ns
+ } else if ns < 0 {
+ i = np
+ } else {
+ i = int(math.Min(float64(np), float64(ns)))
+ }
+ if i < 0 {
+ i = len(input)
+ }
+ params = append(params, input[:i])
+ input = input[i:]
+ continue
+ }
+ if len(input) == 0 || input[0] != c {
+ return nil, "", false
+ }
+ pattern = pattern[1:]
+ input = input[1:]
+ }
+ if input == "" {
+ input = "/"
+ }
+ if input[0] != '/' {
+ return nil, "", false
+ }
+ return params, input, true
+}
+
+// Validate returns an error if Server does not comply with the OpenAPI spec.
+func (server *Server) Validate(ctx context.Context, opts ...ValidationOption) (err error) {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if server.URL == "" {
+ return errors.New("value of url must be a non-empty string")
+ }
+
+ opening, closing := strings.Count(server.URL, "{"), strings.Count(server.URL, "}")
+ if opening != closing {
+ return errors.New("server URL has mismatched { and }")
+ }
+
+ if opening != len(server.Variables) {
+ return errors.New("server has undeclared variables")
+ }
+
+ variables := make([]string, 0, len(server.Variables))
+ for name := range server.Variables {
+ variables = append(variables, name)
+ }
+ sort.Strings(variables)
+ for _, name := range variables {
+ v := server.Variables[name]
+ if !strings.Contains(server.URL, "{"+name+"}") {
+ return errors.New("server has undeclared variables")
+ }
+ if err = v.Validate(ctx); err != nil {
+ return
+ }
+ }
+
+ return validateExtensions(ctx, server.Extensions)
+}
+
+// ServerVariable is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#server-variable-object
+type ServerVariable struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Enum []string `json:"enum,omitempty" yaml:"enum,omitempty"`
+ Default string `json:"default,omitempty" yaml:"default,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+}
+
+// MarshalJSON returns the JSON encoding of ServerVariable.
+func (serverVariable ServerVariable) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 4+len(serverVariable.Extensions))
+ for k, v := range serverVariable.Extensions {
+ m[k] = v
+ }
+ if x := serverVariable.Enum; len(x) != 0 {
+ m["enum"] = x
+ }
+ if x := serverVariable.Default; x != "" {
+ m["default"] = x
+ }
+ if x := serverVariable.Description; x != "" {
+ m["description"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets ServerVariable to a copy of data.
+func (serverVariable *ServerVariable) UnmarshalJSON(data []byte) error {
+ type ServerVariableBis ServerVariable
+ var x ServerVariableBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "enum")
+ delete(x.Extensions, "default")
+ delete(x.Extensions, "description")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *serverVariable = ServerVariable(x)
+ return nil
+}
+
+// Validate returns an error if ServerVariable does not comply with the OpenAPI spec.
+func (serverVariable *ServerVariable) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if serverVariable.Default == "" {
+ data, err := serverVariable.MarshalJSON()
+ if err != nil {
+ return err
+ }
+ return fmt.Errorf("field default is required in %s", data)
+ }
+
+ return validateExtensions(ctx, serverVariable.Extensions)
+}
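
A sketch of a templated server and the helpers above; the URL and the "region" variable are illustrative values, not part of the patch:

package main

import (
	"context"
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	server := &openapi3.Server{
		URL: "https://{region}.example.com/api/v1",
		Variables: map[string]*openapi3.ServerVariable{
			"region": {Default: "us-east-1", Enum: []string{"us-east-1", "eu-west-1"}},
		},
	}

	fmt.Println(server.Validate(context.Background())) // <nil>
	fmt.Println(server.BasePath())                     // /api/v1 <nil>

	// Match an incoming URL against the template: yields the captured
	// variable values and the remaining path.
	params, remaining, ok := server.MatchRawURL("https://eu-west-1.example.com/api/v1/teams")
	fmt.Println(params, remaining, ok) // [eu-west-1] /teams true
}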
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/tag.go b/vendor/github.com/getkin/kin-openapi/openapi3/tag.go
new file mode 100644
index 00000000..eea6462f
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/tag.go
@@ -0,0 +1,90 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+)
+
+// Tags is specified by OpenAPI/Swagger 3.0 standard.
+type Tags []*Tag
+
+func (tags Tags) Get(name string) *Tag {
+ for _, tag := range tags {
+ if tag.Name == name {
+ return tag
+ }
+ }
+ return nil
+}
+
+// Validate returns an error if Tags does not comply with the OpenAPI spec.
+func (tags Tags) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ for _, v := range tags {
+ if err := v.Validate(ctx); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// Tag is specified by OpenAPI/Swagger 3.0 standard.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#tag-object
+type Tag struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Name string `json:"name,omitempty" yaml:"name,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"`
+}
+
+// MarshalJSON returns the JSON encoding of Tag.
+func (t Tag) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 3+len(t.Extensions))
+ for k, v := range t.Extensions {
+ m[k] = v
+ }
+ if x := t.Name; x != "" {
+ m["name"] = x
+ }
+ if x := t.Description; x != "" {
+ m["description"] = x
+ }
+ if x := t.ExternalDocs; x != nil {
+ m["externalDocs"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets Tag to a copy of data.
+func (t *Tag) UnmarshalJSON(data []byte) error {
+ type TagBis Tag
+ var x TagBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "name")
+ delete(x.Extensions, "description")
+ delete(x.Extensions, "externalDocs")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *t = Tag(x)
+ return nil
+}
+
+// Validate returns an error if Tag does not comply with the OpenAPI spec.
+func (t *Tag) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ if v := t.ExternalDocs; v != nil {
+ if err := v.Validate(ctx); err != nil {
+ return fmt.Errorf("invalid external docs: %w", err)
+ }
+ }
+
+ return validateExtensions(ctx, t.Extensions)
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/base.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/base.yml
new file mode 100644
index 00000000..ff8240eb
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/base.yml
@@ -0,0 +1,16 @@
+openapi: "3.0.3"
+info:
+ title: Recursive cyclic refs example
+ version: "1.0"
+components:
+ schemas:
+ Foo:
+ properties:
+ foo2:
+ $ref: "other.yml#/components/schemas/Foo2"
+ bar:
+ $ref: "#/components/schemas/Bar"
+ Bar:
+ properties:
+ foo:
+ $ref: "#/components/schemas/Foo"
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/other.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/other.yml
new file mode 100644
index 00000000..29b72d98
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/other.yml
@@ -0,0 +1,10 @@
+openapi: "3.0.3"
+info:
+ title: Recursive cyclic refs example
+ version: "1.0"
+components:
+ schemas:
+ Foo2:
+ properties:
+ id:
+ type: string
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Bar.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Bar.yml
new file mode 100644
index 00000000..cc59fc27
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Bar.yml
@@ -0,0 +1,2 @@
+type: string
+example: bar
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Cat.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Cat.yml
new file mode 100644
index 00000000..c476aa1a
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Cat.yml
@@ -0,0 +1,4 @@
+type: object
+properties:
+ cat:
+ $ref: ../openapi.yml#/components/schemas/Cat
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo.yml
new file mode 100644
index 00000000..53a23366
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo.yml
@@ -0,0 +1,4 @@
+type: object
+properties:
+ bar:
+ $ref: ../openapi.yml#/components/schemas/Bar
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo/Foo2.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo/Foo2.yml
new file mode 100644
index 00000000..aeac81f4
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo/Foo2.yml
@@ -0,0 +1,4 @@
+type: object
+properties:
+ foo:
+ $ref: ../../openapi.yml#/components/schemas/Foo
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/models/error.yaml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/models/error.yaml
new file mode 100644
index 00000000..b4d40479
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/models/error.yaml
@@ -0,0 +1,2 @@
+type: object
+title: ErrorDetails
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/issue615.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/issue615.yml
new file mode 100644
index 00000000..d1370e32
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/issue615.yml
@@ -0,0 +1,60 @@
+openapi: "3.0.3"
+info:
+ title: Deep recursive cyclic refs example
+ version: "1.0"
+paths:
+ /foo:
+ $ref: ./paths/foo.yml
+components:
+ schemas:
+ FilterColumnIncludes:
+ type: object
+ properties:
+ $includes:
+ $ref: '#/components/schemas/FilterPredicate'
+ additionalProperties: false
+ maxProperties: 1
+ minProperties: 1
+ FilterPredicate:
+ oneOf:
+ - $ref: '#/components/schemas/FilterValue'
+ - type: array
+ items:
+ $ref: '#/components/schemas/FilterPredicate'
+ minLength: 1
+ - $ref: '#/components/schemas/FilterPredicateOp'
+ - $ref: '#/components/schemas/FilterPredicateRangeOp'
+ FilterPredicateOp:
+ type: object
+ properties:
+ $any:
+ oneOf:
+ - type: array
+ items:
+ $ref: '#/components/schemas/FilterPredicate'
+ $none:
+ oneOf:
+ - $ref: '#/components/schemas/FilterPredicate'
+ - type: array
+ items:
+ $ref: '#/components/schemas/FilterPredicate'
+ additionalProperties: false
+ maxProperties: 1
+ minProperties: 1
+ FilterPredicateRangeOp:
+ type: object
+ properties:
+ $lt:
+ $ref: '#/components/schemas/FilterRangeValue'
+ additionalProperties: false
+ maxProperties: 2
+ minProperties: 2
+ FilterRangeValue:
+ oneOf:
+ - type: number
+ - type: string
+ FilterValue:
+ oneOf:
+ - type: number
+ - type: string
+ - type: boolean
\ No newline at end of file
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml
new file mode 100644
index 00000000..9f884c71
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml
@@ -0,0 +1,33 @@
+openapi: "3.0.3"
+info:
+ title: Recursive refs example
+ version: "1.0"
+paths:
+ /foo:
+ $ref: ./paths/foo.yml
+ /double-ref-foo:
+ get:
+ summary: Double ref response
+ description: Reference response with double reference.
+ responses:
+ "400":
+ $ref: "#/components/responses/400"
+components:
+ schemas:
+ Foo:
+ $ref: ./components/Foo.yml
+ Foo2:
+ $ref: ./components/Foo/Foo2.yml
+ Bar:
+ $ref: ./components/Bar.yml
+ Cat:
+ $ref: ./components/Cat.yml
+ Error:
+ $ref: ./components/models/error.yaml
+ responses:
+ "400":
+ description: 400 Bad Request
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/Error"
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml.internalized.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml.internalized.yml
new file mode 100644
index 00000000..0d508527
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml.internalized.yml
@@ -0,0 +1,110 @@
+{
+ "components": {
+ "parameters": {
+ "number": {
+ "in": "query",
+ "name": "someNumber",
+ "schema": {
+ "type": "string"
+ }
+ }
+ },
+ "responses": {
+ "400": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Error"
+ }
+ }
+ },
+ "description": "400 Bad Request"
+ }
+ },
+ "schemas": {
+ "Bar": {
+ "example": "bar",
+ "type": "string"
+ },
+ "Error":{
+ "title":"ErrorDetails",
+ "type":"object"
+ },
+ "Foo": {
+ "properties": {
+ "bar": {
+ "$ref": "#/components/schemas/Bar"
+ }
+ },
+ "type": "object"
+ },
+ "Foo2": {
+ "properties": {
+ "foo": {
+ "$ref": "#/components/schemas/Foo"
+ }
+ },
+ "type": "object"
+ },
+ "error":{
+ "title":"ErrorDetails",
+ "type":"object"
+ },
+ "Cat": {
+ "properties": {
+ "cat": {
+ "$ref": "#/components/schemas/Cat"
+ }
+ },
+ "type": "object"
+ }
+ }
+ },
+ "info": {
+ "title": "Recursive refs example",
+ "version": "1.0"
+ },
+ "openapi": "3.0.3",
+ "paths": {
+ "/double-ref-foo": {
+ "get": {
+ "description": "Reference response with double reference.",
+ "responses": {
+ "400": {
+ "$ref": "#/components/responses/400"
+ }
+ },
+ "summary": "Double ref response"
+ }
+ },
+ "/foo": {
+ "get": {
+ "responses": {
+ "200": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "properties": {
+ "foo2": {
+ "$ref": "#/components/schemas/Foo2"
+ }
+ },
+ "type": "object"
+ }
+ }
+ },
+ "description": "OK"
+ },
+ "400": {
+ "$ref": "#/components/responses/400"
+ }
+ }
+ },
+ "parameters": [
+ {
+ "$ref": "#/components/parameters/number"
+ }
+ ]
+ }
+ }
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/parameters/number.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/parameters/number.yml
new file mode 100644
index 00000000..29f0f264
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/parameters/number.yml
@@ -0,0 +1,4 @@
+name: someNumber
+in: query
+schema:
+ type: string
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/paths/foo.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/paths/foo.yml
new file mode 100644
index 00000000..4c845b53
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/paths/foo.yml
@@ -0,0 +1,15 @@
+parameters:
+ - $ref: ../parameters/number.yml
+get:
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ foo2:
+ $ref: ../openapi.yml#/components/schemas/Foo2
+ "400":
+ $ref: "../openapi.yml#/components/responses/400"
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/validation_options.go b/vendor/github.com/getkin/kin-openapi/openapi3/validation_options.go
new file mode 100644
index 00000000..8982594b
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/validation_options.go
@@ -0,0 +1,112 @@
+package openapi3
+
+import "context"
+
+// ValidationOption allows the modification of how the OpenAPI document is validated.
+type ValidationOption func(options *ValidationOptions)
+
+// ValidationOptions provides configuration for validating OpenAPI documents.
+type ValidationOptions struct {
+ examplesValidationAsReq, examplesValidationAsRes bool
+ examplesValidationDisabled bool
+ schemaDefaultsValidationDisabled bool
+ schemaFormatValidationEnabled bool
+ schemaPatternValidationDisabled bool
+ extraSiblingFieldsAllowed map[string]struct{}
+}
+
+type validationOptionsKey struct{}
+
+// AllowExtraSiblingFields called as AllowExtraSiblingFields("description") makes Validate not return an error when said field appears next to a $ref.
+func AllowExtraSiblingFields(fields ...string) ValidationOption {
+ return func(options *ValidationOptions) {
+ if options.extraSiblingFieldsAllowed == nil && len(fields) != 0 {
+ options.extraSiblingFieldsAllowed = make(map[string]struct{}, len(fields))
+ }
+ for _, field := range fields {
+ options.extraSiblingFieldsAllowed[field] = struct{}{}
+ }
+ }
+}
+
+// EnableSchemaFormatValidation makes Validate return an error when validating documents that mention schema formats that are not defined by the OpenAPIv3 specification.
+// By default, schema format validation is disabled.
+func EnableSchemaFormatValidation() ValidationOption {
+ return func(options *ValidationOptions) {
+ options.schemaFormatValidationEnabled = true
+ }
+}
+
+// DisableSchemaFormatValidation does the opposite of EnableSchemaFormatValidation.
+// By default, schema format validation is disabled.
+func DisableSchemaFormatValidation() ValidationOption {
+ return func(options *ValidationOptions) {
+ options.schemaFormatValidationEnabled = false
+ }
+}
+
+// EnableSchemaPatternValidation does the opposite of DisableSchemaPatternValidation.
+// By default, schema pattern validation is enabled.
+func EnableSchemaPatternValidation() ValidationOption {
+ return func(options *ValidationOptions) {
+ options.schemaPatternValidationDisabled = false
+ }
+}
+
+// DisableSchemaPatternValidation makes Validate not return an error when validating patterns that are not supported by the Go regexp engine.
+func DisableSchemaPatternValidation() ValidationOption {
+ return func(options *ValidationOptions) {
+ options.schemaPatternValidationDisabled = true
+ }
+}
+
+// EnableSchemaDefaultsValidation does the opposite of DisableSchemaDefaultsValidation.
+// By default, schema default values are validated against their schema.
+func EnableSchemaDefaultsValidation() ValidationOption {
+ return func(options *ValidationOptions) {
+ options.schemaDefaultsValidationDisabled = false
+ }
+}
+
+// DisableSchemaDefaultsValidation disables schemas' default field validation.
+// By default, schema default values are validated against their schema.
+func DisableSchemaDefaultsValidation() ValidationOption {
+ return func(options *ValidationOptions) {
+ options.schemaDefaultsValidationDisabled = true
+ }
+}
+
+// EnableExamplesValidation does the opposite of DisableExamplesValidation.
+// By default, all schema examples are validated.
+func EnableExamplesValidation() ValidationOption {
+ return func(options *ValidationOptions) {
+ options.examplesValidationDisabled = false
+ }
+}
+
+// DisableExamplesValidation disables all example schema validation.
+// By default, all schema examples are validated.
+func DisableExamplesValidation() ValidationOption {
+ return func(options *ValidationOptions) {
+ options.examplesValidationDisabled = true
+ }
+}
+
+// WithValidationOptions allows adding validation options to a context object that can be used when validating any OpenAPI type.
+func WithValidationOptions(ctx context.Context, opts ...ValidationOption) context.Context {
+ if len(opts) == 0 {
+ return ctx
+ }
+ options := &ValidationOptions{}
+ for _, opt := range opts {
+ opt(options)
+ }
+ return context.WithValue(ctx, validationOptionsKey{}, options)
+}
+
+func getValidationOptions(ctx context.Context) *ValidationOptions {
+ if options, ok := ctx.Value(validationOptionsKey{}).(*ValidationOptions); ok {
+ return options
+ }
+ return &ValidationOptions{}
+}
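
A sketch of passing these options when validating a loaded document; it assumes the loader and document types (openapi3.NewLoader, Loader.LoadFromFile, T.Validate) vendored elsewhere in this patch, and the file name "spec.json" is illustrative:

package main

import (
	"context"
	"log"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	loader := openapi3.NewLoader()
	doc, err := loader.LoadFromFile("spec.json") // illustrative path
	if err != nil {
		log.Fatal(err)
	}

	// Skip example validation but check schema formats; everything else stays strict.
	err = doc.Validate(context.Background(),
		openapi3.DisableExamplesValidation(),
		openapi3.EnableSchemaFormatValidation(),
	)
	if err != nil {
		log.Fatal(err)
	}
}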
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/visited.go b/vendor/github.com/getkin/kin-openapi/openapi3/visited.go
new file mode 100644
index 00000000..67f970e3
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/visited.go
@@ -0,0 +1,41 @@
+package openapi3
+
+func newVisited() visitedComponent {
+ return visitedComponent{
+ header: make(map[*Header]struct{}),
+ schema: make(map[*Schema]struct{}),
+ }
+}
+
+type visitedComponent struct {
+ header map[*Header]struct{}
+ schema map[*Schema]struct{}
+}
+
+// resetVisited clears visitedComponent map
+// should be called before recursion over doc *T
+func (doc *T) resetVisited() {
+ doc.visited = newVisited()
+}
+
+// isVisitedHeader returns `true` if the *Header pointer was already visited
+// otherwise it returns `false`
+func (doc *T) isVisitedHeader(h *Header) bool {
+ if _, ok := doc.visited.header[h]; ok {
+ return true
+ }
+
+ doc.visited.header[h] = struct{}{}
+ return false
+}
+
+// isVisitedSchema returns `true` if the *Schema pointer was already visited
+// otherwise it returns `false`
+func (doc *T) isVisitedSchema(s *Schema) bool {
+ if _, ok := doc.visited.schema[s]; ok {
+ return true
+ }
+
+ doc.visited.schema[s] = struct{}{}
+ return false
+}
diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/xml.go b/vendor/github.com/getkin/kin-openapi/openapi3/xml.go
new file mode 100644
index 00000000..604b607d
--- /dev/null
+++ b/vendor/github.com/getkin/kin-openapi/openapi3/xml.go
@@ -0,0 +1,69 @@
+package openapi3
+
+import (
+ "context"
+ "encoding/json"
+)
+
+// XML is specified by OpenAPI/Swagger standard version 3.
+// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#xml-object
+type XML struct {
+ Extensions map[string]interface{} `json:"-" yaml:"-"`
+
+ Name string `json:"name,omitempty" yaml:"name,omitempty"`
+ Namespace string `json:"namespace,omitempty" yaml:"namespace,omitempty"`
+ Prefix string `json:"prefix,omitempty" yaml:"prefix,omitempty"`
+ Attribute bool `json:"attribute,omitempty" yaml:"attribute,omitempty"`
+ Wrapped bool `json:"wrapped,omitempty" yaml:"wrapped,omitempty"`
+}
+
+// MarshalJSON returns the JSON encoding of XML.
+func (xml XML) MarshalJSON() ([]byte, error) {
+ m := make(map[string]interface{}, 5+len(xml.Extensions))
+ for k, v := range xml.Extensions {
+ m[k] = v
+ }
+ if x := xml.Name; x != "" {
+ m["name"] = x
+ }
+ if x := xml.Namespace; x != "" {
+ m["namespace"] = x
+ }
+ if x := xml.Prefix; x != "" {
+ m["prefix"] = x
+ }
+ if x := xml.Attribute; x {
+ m["attribute"] = x
+ }
+ if x := xml.Wrapped; x {
+ m["wrapped"] = x
+ }
+ return json.Marshal(m)
+}
+
+// UnmarshalJSON sets XML to a copy of data.
+func (xml *XML) UnmarshalJSON(data []byte) error {
+ type XMLBis XML
+ var x XMLBis
+ if err := json.Unmarshal(data, &x); err != nil {
+ return unmarshalError(err)
+ }
+ _ = json.Unmarshal(data, &x.Extensions)
+ delete(x.Extensions, "name")
+ delete(x.Extensions, "namespace")
+ delete(x.Extensions, "prefix")
+ delete(x.Extensions, "attribute")
+ delete(x.Extensions, "wrapped")
+ if len(x.Extensions) == 0 {
+ x.Extensions = nil
+ }
+ *xml = XML(x)
+ return nil
+}
+
+// Validate returns an error if XML does not comply with the OpenAPI spec.
+func (xml *XML) Validate(ctx context.Context, opts ...ValidationOption) error {
+ ctx = WithValidationOptions(ctx, opts...)
+
+ return validateExtensions(ctx, xml.Extensions)
+}
diff --git a/vendor/github.com/go-openapi/jsonpointer/.editorconfig b/vendor/github.com/go-openapi/jsonpointer/.editorconfig
new file mode 100644
index 00000000..3152da69
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/.editorconfig
@@ -0,0 +1,26 @@
+# top-most EditorConfig file
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+# Set default charset
+[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
+charset = utf-8
+
+# Tab indentation (no size specified)
+[*.go]
+indent_style = tab
+
+[*.md]
+trim_trailing_whitespace = false
+
+# Matches the exact files either package.json or .travis.yml
+[{package.json,.travis.yml}]
+indent_style = space
+indent_size = 2
diff --git a/vendor/github.com/go-openapi/jsonpointer/.gitignore b/vendor/github.com/go-openapi/jsonpointer/.gitignore
new file mode 100644
index 00000000..769c2440
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/.gitignore
@@ -0,0 +1 @@
+secrets.yml
diff --git a/vendor/github.com/go-openapi/jsonpointer/.golangci.yml b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml
new file mode 100644
index 00000000..22f8d21c
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml
@@ -0,0 +1,61 @@
+linters-settings:
+ govet:
+ check-shadowing: true
+ golint:
+ min-confidence: 0
+ gocyclo:
+ min-complexity: 45
+ maligned:
+ suggest-new: true
+ dupl:
+ threshold: 200
+ goconst:
+ min-len: 2
+ min-occurrences: 3
+
+linters:
+ enable-all: true
+ disable:
+ - maligned
+ - unparam
+ - lll
+ - gochecknoinits
+ - gochecknoglobals
+ - funlen
+ - godox
+ - gocognit
+ - whitespace
+ - wsl
+ - wrapcheck
+ - testpackage
+ - nlreturn
+ - gomnd
+ - exhaustivestruct
+ - goerr113
+ - errorlint
+ - nestif
+ - godot
+ - gofumpt
+ - paralleltest
+ - tparallel
+ - thelper
+ - ifshort
+ - exhaustruct
+ - varnamelen
+ - gci
+ - depguard
+ - errchkjson
+ - inamedparam
+ - nonamedreturns
+ - musttag
+ - ireturn
+ - forcetypeassert
+ - cyclop
+ # deprecated linters
+ - deadcode
+ - interfacer
+ - scopelint
+ - varcheck
+ - structcheck
+ - golint
+ - nosnakecase
diff --git a/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md
new file mode 100644
index 00000000..9322b065
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/jsonpointer/LICENSE b/vendor/github.com/go-openapi/jsonpointer/LICENSE
new file mode 100644
index 00000000..d6456956
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/jsonpointer/README.md b/vendor/github.com/go-openapi/jsonpointer/README.md
new file mode 100644
index 00000000..0108f1d5
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/README.md
@@ -0,0 +1,19 @@
+# gojsonpointer [![Build Status](https://github.com/go-openapi/jsonpointer/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/jsonpointer/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/jsonpointer/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonpointer)
+
+[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonpointer/master/LICENSE)
+[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/jsonpointer.svg)](https://pkg.go.dev/github.com/go-openapi/jsonpointer)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/jsonpointer)](https://goreportcard.com/report/github.com/go-openapi/jsonpointer)
+
+An implementation of JSON Pointer for the Go language
+
+## Status
+Completed YES
+
+Tested YES
+
+## References
+http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07
+
+### Note
+Section 4 (Evaluation) of the reference above, starting with 'If the currently referenced value is a JSON array, the reference token MUST contain either...', is not implemented.
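+
+## Usage
+
+A minimal usage sketch, based on the `New` and `Get` functions defined in `pointer.go` below (the sample document is illustrative):
+
+```go
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+
+	"github.com/go-openapi/jsonpointer"
+)
+
+func main() {
+	// Decode an arbitrary JSON document into a generic value.
+	var doc any
+	if err := json.Unmarshal([]byte(`{"items": [{"name": "first"}, {"name": "second"}]}`), &doc); err != nil {
+		panic(err)
+	}
+
+	// "/items/1/name" selects the "name" field of the second array element.
+	ptr, err := jsonpointer.New("/items/1/name")
+	if err != nil {
+		panic(err)
+	}
+
+	value, kind, err := ptr.Get(doc)
+	if err != nil {
+		panic(err)
+	}
+	fmt.Println(value, kind) // second string
+}
+```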
diff --git a/vendor/github.com/go-openapi/jsonpointer/pointer.go b/vendor/github.com/go-openapi/jsonpointer/pointer.go
new file mode 100644
index 00000000..d975773d
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/pointer.go
@@ -0,0 +1,531 @@
+// Copyright 2013 sigu-399 ( https://github.com/sigu-399 )
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// author sigu-399
+// author-github https://github.com/sigu-399
+// author-mail sigu.399@gmail.com
+//
+// repository-name jsonpointer
+// repository-desc An implementation of JSON Pointer - Go language
+//
+// description Main and unique file.
+//
+// created 25-02-2013
+
+package jsonpointer
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/swag"
+)
+
+const (
+ emptyPointer = ``
+ pointerSeparator = `/`
+
+ invalidStart = `JSON pointer must be empty or start with a "` + pointerSeparator
+ notFound = `Can't find the pointer in the document`
+)
+
+var jsonPointableType = reflect.TypeOf(new(JSONPointable)).Elem()
+var jsonSetableType = reflect.TypeOf(new(JSONSetable)).Elem()
+
+// JSONPointable is an interface for structs to implement when they need to customize the
+// json pointer process
+type JSONPointable interface {
+ JSONLookup(string) (any, error)
+}
+
+// JSONSetable is an interface for structs to implement when they need to customize the
+// json pointer process
+type JSONSetable interface {
+ JSONSet(string, any) error
+}
+
+// New creates a new json pointer for the given string
+func New(jsonPointerString string) (Pointer, error) {
+
+ var p Pointer
+ err := p.parse(jsonPointerString)
+ return p, err
+
+}
+
+// Pointer is the json pointer representation
+type Pointer struct {
+ referenceTokens []string
+}
+
+// "Constructor", parses the given string JSON pointer
+func (p *Pointer) parse(jsonPointerString string) error {
+
+ var err error
+
+ if jsonPointerString != emptyPointer {
+ if !strings.HasPrefix(jsonPointerString, pointerSeparator) {
+ err = errors.New(invalidStart)
+ } else {
+ referenceTokens := strings.Split(jsonPointerString, pointerSeparator)
+ p.referenceTokens = append(p.referenceTokens, referenceTokens[1:]...)
+ }
+ }
+
+ return err
+}
+
+// Get uses the pointer to retrieve a value from a JSON document
+func (p *Pointer) Get(document any) (any, reflect.Kind, error) {
+ return p.get(document, swag.DefaultJSONNameProvider)
+}
+
+// Set uses the pointer to set a value in a JSON document
+func (p *Pointer) Set(document any, value any) (any, error) {
+ return document, p.set(document, value, swag.DefaultJSONNameProvider)
+}
+
+// GetForToken gets a value for a json pointer token 1 level deep
+func GetForToken(document any, decodedToken string) (any, reflect.Kind, error) {
+ return getSingleImpl(document, decodedToken, swag.DefaultJSONNameProvider)
+}
+
+// SetForToken sets a value for a json pointer token 1 level deep
+func SetForToken(document any, decodedToken string, value any) (any, error) {
+ return document, setSingleImpl(document, value, decodedToken, swag.DefaultJSONNameProvider)
+}
+
+func isNil(input any) bool {
+ if input == nil {
+ return true
+ }
+
+ kind := reflect.TypeOf(input).Kind()
+ switch kind { //nolint:exhaustive
+ case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan:
+ return reflect.ValueOf(input).IsNil()
+ default:
+ return false
+ }
+}
+
+func getSingleImpl(node any, decodedToken string, nameProvider *swag.NameProvider) (any, reflect.Kind, error) {
+ rValue := reflect.Indirect(reflect.ValueOf(node))
+ kind := rValue.Kind()
+ if isNil(node) {
+ return nil, kind, fmt.Errorf("nil value has no field %q", decodedToken)
+ }
+
+ switch typed := node.(type) {
+ case JSONPointable:
+ r, err := typed.JSONLookup(decodedToken)
+ if err != nil {
+ return nil, kind, err
+ }
+ return r, kind, nil
+ case *any: // case of a pointer to interface, that is not resolved by reflect.Indirect
+ return getSingleImpl(*typed, decodedToken, nameProvider)
+ }
+
+ switch kind { //nolint:exhaustive
+ case reflect.Struct:
+ nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
+ if !ok {
+ return nil, kind, fmt.Errorf("object has no field %q", decodedToken)
+ }
+ fld := rValue.FieldByName(nm)
+ return fld.Interface(), kind, nil
+
+ case reflect.Map:
+ kv := reflect.ValueOf(decodedToken)
+ mv := rValue.MapIndex(kv)
+
+ if mv.IsValid() {
+ return mv.Interface(), kind, nil
+ }
+ return nil, kind, fmt.Errorf("object has no key %q", decodedToken)
+
+ case reflect.Slice:
+ tokenIndex, err := strconv.Atoi(decodedToken)
+ if err != nil {
+ return nil, kind, err
+ }
+ sLength := rValue.Len()
+ if tokenIndex < 0 || tokenIndex >= sLength {
+ return nil, kind, fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength-1, tokenIndex)
+ }
+
+ elem := rValue.Index(tokenIndex)
+ return elem.Interface(), kind, nil
+
+ default:
+ return nil, kind, fmt.Errorf("invalid token reference %q", decodedToken)
+ }
+
+}
+
+func setSingleImpl(node, data any, decodedToken string, nameProvider *swag.NameProvider) error {
+ rValue := reflect.Indirect(reflect.ValueOf(node))
+
+ if ns, ok := node.(JSONSetable); ok { // pointer impl
+ return ns.JSONSet(decodedToken, data)
+ }
+
+ if rValue.Type().Implements(jsonSetableType) {
+ return node.(JSONSetable).JSONSet(decodedToken, data)
+ }
+
+ switch rValue.Kind() { //nolint:exhaustive
+ case reflect.Struct:
+ nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
+ if !ok {
+ return fmt.Errorf("object has no field %q", decodedToken)
+ }
+ fld := rValue.FieldByName(nm)
+ if fld.IsValid() {
+ fld.Set(reflect.ValueOf(data))
+ }
+ return nil
+
+ case reflect.Map:
+ kv := reflect.ValueOf(decodedToken)
+ rValue.SetMapIndex(kv, reflect.ValueOf(data))
+ return nil
+
+ case reflect.Slice:
+ tokenIndex, err := strconv.Atoi(decodedToken)
+ if err != nil {
+ return err
+ }
+ sLength := rValue.Len()
+ if tokenIndex < 0 || tokenIndex >= sLength {
+ return fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength, tokenIndex)
+ }
+
+ elem := rValue.Index(tokenIndex)
+ if !elem.CanSet() {
+ return fmt.Errorf("can't set slice index %s to %v", decodedToken, data)
+ }
+ elem.Set(reflect.ValueOf(data))
+ return nil
+
+ default:
+ return fmt.Errorf("invalid token reference %q", decodedToken)
+ }
+
+}
+
+func (p *Pointer) get(node any, nameProvider *swag.NameProvider) (any, reflect.Kind, error) {
+
+ if nameProvider == nil {
+ nameProvider = swag.DefaultJSONNameProvider
+ }
+
+ kind := reflect.Invalid
+
+ // Full document when empty
+ if len(p.referenceTokens) == 0 {
+ return node, kind, nil
+ }
+
+ for _, token := range p.referenceTokens {
+
+ decodedToken := Unescape(token)
+
+ r, knd, err := getSingleImpl(node, decodedToken, nameProvider)
+ if err != nil {
+ return nil, knd, err
+ }
+ node = r
+ }
+
+ rValue := reflect.ValueOf(node)
+ kind = rValue.Kind()
+
+ return node, kind, nil
+}
+
+func (p *Pointer) set(node, data any, nameProvider *swag.NameProvider) error {
+ knd := reflect.ValueOf(node).Kind()
+
+ if knd != reflect.Ptr && knd != reflect.Struct && knd != reflect.Map && knd != reflect.Slice && knd != reflect.Array {
+ return fmt.Errorf("only structs, pointers, maps and slices are supported for setting values")
+ }
+
+ if nameProvider == nil {
+ nameProvider = swag.DefaultJSONNameProvider
+ }
+
+ // Full document when empty
+ if len(p.referenceTokens) == 0 {
+ return nil
+ }
+
+ lastI := len(p.referenceTokens) - 1
+ for i, token := range p.referenceTokens {
+ isLastToken := i == lastI
+ decodedToken := Unescape(token)
+
+ if isLastToken {
+
+ return setSingleImpl(node, data, decodedToken, nameProvider)
+ }
+
+ rValue := reflect.Indirect(reflect.ValueOf(node))
+ kind := rValue.Kind()
+
+ if rValue.Type().Implements(jsonPointableType) {
+ r, err := node.(JSONPointable).JSONLookup(decodedToken)
+ if err != nil {
+ return err
+ }
+ fld := reflect.ValueOf(r)
+ if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr {
+ node = fld.Addr().Interface()
+ continue
+ }
+ node = r
+ continue
+ }
+
+ switch kind { //nolint:exhaustive
+ case reflect.Struct:
+ nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
+ if !ok {
+ return fmt.Errorf("object has no field %q", decodedToken)
+ }
+ fld := rValue.FieldByName(nm)
+ if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr {
+ node = fld.Addr().Interface()
+ continue
+ }
+ node = fld.Interface()
+
+ case reflect.Map:
+ kv := reflect.ValueOf(decodedToken)
+ mv := rValue.MapIndex(kv)
+
+ if !mv.IsValid() {
+ return fmt.Errorf("object has no key %q", decodedToken)
+ }
+ if mv.CanAddr() && mv.Kind() != reflect.Interface && mv.Kind() != reflect.Map && mv.Kind() != reflect.Slice && mv.Kind() != reflect.Ptr {
+ node = mv.Addr().Interface()
+ continue
+ }
+ node = mv.Interface()
+
+ case reflect.Slice:
+ tokenIndex, err := strconv.Atoi(decodedToken)
+ if err != nil {
+ return err
+ }
+ sLength := rValue.Len()
+ if tokenIndex < 0 || tokenIndex >= sLength {
+ return fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength, tokenIndex)
+ }
+
+ elem := rValue.Index(tokenIndex)
+ if elem.CanAddr() && elem.Kind() != reflect.Interface && elem.Kind() != reflect.Map && elem.Kind() != reflect.Slice && elem.Kind() != reflect.Ptr {
+ node = elem.Addr().Interface()
+ continue
+ }
+ node = elem.Interface()
+
+ default:
+ return fmt.Errorf("invalid token reference %q", decodedToken)
+ }
+
+ }
+
+ return nil
+}
+
+// DecodedTokens returns the decoded tokens
+func (p *Pointer) DecodedTokens() []string {
+ result := make([]string, 0, len(p.referenceTokens))
+ for _, t := range p.referenceTokens {
+ result = append(result, Unescape(t))
+ }
+ return result
+}
+
+// IsEmpty returns true if this is an empty json pointer
+// this indicates that it points to the root document
+func (p *Pointer) IsEmpty() bool {
+ return len(p.referenceTokens) == 0
+}
+
+// String returns the string representation of the json pointer
+func (p *Pointer) String() string {
+
+ if len(p.referenceTokens) == 0 {
+ return emptyPointer
+ }
+
+ pointerString := pointerSeparator + strings.Join(p.referenceTokens, pointerSeparator)
+
+ return pointerString
+}
+
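+// Offset computes the byte offset within the given JSON document at which
+// the token referenced by the pointer is located.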
+func (p *Pointer) Offset(document string) (int64, error) {
+ dec := json.NewDecoder(strings.NewReader(document))
+ var offset int64
+ for _, ttk := range p.DecodedTokens() {
+ tk, err := dec.Token()
+ if err != nil {
+ return 0, err
+ }
+ switch tk := tk.(type) {
+ case json.Delim:
+ switch tk {
+ case '{':
+ offset, err = offsetSingleObject(dec, ttk)
+ if err != nil {
+ return 0, err
+ }
+ case '[':
+ offset, err = offsetSingleArray(dec, ttk)
+ if err != nil {
+ return 0, err
+ }
+ default:
+ return 0, fmt.Errorf("invalid token %#v", tk)
+ }
+ default:
+ return 0, fmt.Errorf("invalid token %#v", tk)
+ }
+ }
+ return offset, nil
+}
+
+func offsetSingleObject(dec *json.Decoder, decodedToken string) (int64, error) {
+ for dec.More() {
+ offset := dec.InputOffset()
+ tk, err := dec.Token()
+ if err != nil {
+ return 0, err
+ }
+ switch tk := tk.(type) {
+ case json.Delim:
+ switch tk {
+ case '{':
+ if err = drainSingle(dec); err != nil {
+ return 0, err
+ }
+ case '[':
+ if err = drainSingle(dec); err != nil {
+ return 0, err
+ }
+ }
+ case string:
+ if tk == decodedToken {
+ return offset, nil
+ }
+ default:
+ return 0, fmt.Errorf("invalid token %#v", tk)
+ }
+ }
+ return 0, fmt.Errorf("token reference %q not found", decodedToken)
+}
+
+func offsetSingleArray(dec *json.Decoder, decodedToken string) (int64, error) {
+ idx, err := strconv.Atoi(decodedToken)
+ if err != nil {
+ return 0, fmt.Errorf("token reference %q is not a number: %v", decodedToken, err)
+ }
+ var i int
+ for i = 0; i < idx && dec.More(); i++ {
+ tk, err := dec.Token()
+ if err != nil {
+ return 0, err
+ }
+
+ if delim, isDelim := tk.(json.Delim); isDelim {
+ switch delim {
+ case '{':
+ if err = drainSingle(dec); err != nil {
+ return 0, err
+ }
+ case '[':
+ if err = drainSingle(dec); err != nil {
+ return 0, err
+ }
+ }
+ }
+ }
+
+ if !dec.More() {
+ return 0, fmt.Errorf("token reference %q not found", decodedToken)
+ }
+ return dec.InputOffset(), nil
+}
+
+// drainSingle drains a single level of object or array.
+// The decoder has to guarantee the beginning delim (i.e. '{' or '[') has been consumed.
+func drainSingle(dec *json.Decoder) error {
+ for dec.More() {
+ tk, err := dec.Token()
+ if err != nil {
+ return err
+ }
+ if delim, isDelim := tk.(json.Delim); isDelim {
+ switch delim {
+ case '{':
+ if err = drainSingle(dec); err != nil {
+ return err
+ }
+ case '[':
+ if err = drainSingle(dec); err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ // Consumes the ending delim
+ if _, err := dec.Token(); err != nil {
+ return err
+ }
+ return nil
+}
+
+// Specific JSON pointer encoding here
+// ~0 => ~
+// ~1 => /
+// ... and vice versa
+
+const (
+ encRefTok0 = `~0`
+ encRefTok1 = `~1`
+ decRefTok0 = `~`
+ decRefTok1 = `/`
+)
+
+// Unescape unescapes a json pointer reference token string to the original representation
+func Unescape(token string) string {
+ step1 := strings.ReplaceAll(token, encRefTok1, decRefTok1)
+ step2 := strings.ReplaceAll(step1, encRefTok0, decRefTok0)
+ return step2
+}
+
+// Escape escapes a pointer reference token string
+func Escape(token string) string {
+ step1 := strings.ReplaceAll(token, decRefTok0, encRefTok0)
+ step2 := strings.ReplaceAll(step1, decRefTok1, encRefTok1)
+ return step2
+}
diff --git a/vendor/github.com/go-openapi/swag/.editorconfig b/vendor/github.com/go-openapi/swag/.editorconfig
new file mode 100644
index 00000000..3152da69
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/.editorconfig
@@ -0,0 +1,26 @@
+# top-most EditorConfig file
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+# Set default charset
+[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
+charset = utf-8
+
+# Tab indentation (no size specified)
+[*.go]
+indent_style = tab
+
+[*.md]
+trim_trailing_whitespace = false
+
+# Matches the exact files package.json and .travis.yml
+[{package.json,.travis.yml}]
+indent_style = space
+indent_size = 2
diff --git a/vendor/github.com/go-openapi/swag/.gitattributes b/vendor/github.com/go-openapi/swag/.gitattributes
new file mode 100644
index 00000000..49ad5276
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/.gitattributes
@@ -0,0 +1,2 @@
+# gofmt always uses LF, whereas Git uses CRLF on Windows.
+*.go text eol=lf
diff --git a/vendor/github.com/go-openapi/swag/.gitignore b/vendor/github.com/go-openapi/swag/.gitignore
new file mode 100644
index 00000000..c4b1b64f
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/.gitignore
@@ -0,0 +1,5 @@
+secrets.yml
+vendor
+Godeps
+.idea
+*.out
diff --git a/vendor/github.com/go-openapi/swag/.golangci.yml b/vendor/github.com/go-openapi/swag/.golangci.yml
new file mode 100644
index 00000000..80e2be00
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/.golangci.yml
@@ -0,0 +1,60 @@
+linters-settings:
+ govet:
+ check-shadowing: true
+ golint:
+ min-confidence: 0
+ gocyclo:
+ min-complexity: 45
+ maligned:
+ suggest-new: true
+ dupl:
+ threshold: 200
+ goconst:
+ min-len: 3
+ min-occurrences: 3
+
+linters:
+ enable-all: true
+ disable:
+ - maligned
+ - lll
+ - gochecknoinits
+ - gochecknoglobals
+ - funlen
+ - godox
+ - gocognit
+ - whitespace
+ - wsl
+ - wrapcheck
+ - testpackage
+ - nlreturn
+ - gomnd
+ - exhaustivestruct
+ - goerr113
+ - errorlint
+ - nestif
+ - godot
+ - gofumpt
+ - paralleltest
+ - tparallel
+ - thelper
+ - ifshort
+ - exhaustruct
+ - varnamelen
+ - gci
+ - depguard
+ - errchkjson
+ - inamedparam
+ - nonamedreturns
+ - musttag
+ - ireturn
+ - forcetypeassert
+ - cyclop
+ # deprecated linters
+ - deadcode
+ - interfacer
+ - scopelint
+ - varcheck
+ - structcheck
+ - golint
+ - nosnakecase
diff --git a/vendor/github.com/go-openapi/swag/BENCHMARK.md b/vendor/github.com/go-openapi/swag/BENCHMARK.md
new file mode 100644
index 00000000..e7f28ed6
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/BENCHMARK.md
@@ -0,0 +1,52 @@
+# Benchmarks
+
+## Name mangling utilities
+
+```bash
+go test -bench XXX -run XXX -benchtime 30s
+```
+
+### Benchmarks at b3e7a5386f996177e4808f11acb2aa93a0f660df
+
+```
+goos: linux
+goarch: amd64
+pkg: github.com/go-openapi/swag
+cpu: Intel(R) Core(TM) i5-6200U CPU @ 2.30GHz
+BenchmarkToXXXName/ToGoName-4 862623 44101 ns/op 10450 B/op 732 allocs/op
+BenchmarkToXXXName/ToVarName-4 853656 40728 ns/op 10468 B/op 734 allocs/op
+BenchmarkToXXXName/ToFileName-4 1268312 27813 ns/op 9785 B/op 617 allocs/op
+BenchmarkToXXXName/ToCommandName-4 1276322 27903 ns/op 9785 B/op 617 allocs/op
+BenchmarkToXXXName/ToHumanNameLower-4 895334 40354 ns/op 10472 B/op 731 allocs/op
+BenchmarkToXXXName/ToHumanNameTitle-4 882441 40678 ns/op 10566 B/op 749 allocs/op
+```
+
+### Benchmarks after PR #79
+
+Roughly a 10x performance improvement and about 100x fewer memory allocations.
+
+```
+goos: linux
+goarch: amd64
+pkg: github.com/go-openapi/swag
+cpu: Intel(R) Core(TM) i5-6200U CPU @ 2.30GHz
+BenchmarkToXXXName/ToGoName-4 9595830 3991 ns/op 42 B/op 5 allocs/op
+BenchmarkToXXXName/ToVarName-4 9194276 3984 ns/op 62 B/op 7 allocs/op
+BenchmarkToXXXName/ToFileName-4 17002711 2123 ns/op 147 B/op 7 allocs/op
+BenchmarkToXXXName/ToCommandName-4 16772926 2111 ns/op 147 B/op 7 allocs/op
+BenchmarkToXXXName/ToHumanNameLower-4 9788331 3749 ns/op 92 B/op 6 allocs/op
+BenchmarkToXXXName/ToHumanNameTitle-4 9188260 3941 ns/op 104 B/op 6 allocs/op
+```
+
+```
+goos: linux
+goarch: amd64
+pkg: github.com/go-openapi/swag
+cpu: AMD Ryzen 7 5800X 8-Core Processor
+BenchmarkToXXXName/ToGoName-16 18527378 1972 ns/op 42 B/op 5 allocs/op
+BenchmarkToXXXName/ToVarName-16 15552692 2093 ns/op 62 B/op 7 allocs/op
+BenchmarkToXXXName/ToFileName-16 32161176 1117 ns/op 147 B/op 7 allocs/op
+BenchmarkToXXXName/ToCommandName-16 32256634 1137 ns/op 147 B/op 7 allocs/op
+BenchmarkToXXXName/ToHumanNameLower-16 18599661 1946 ns/op 92 B/op 6 allocs/op
+BenchmarkToXXXName/ToHumanNameTitle-16 17581353 2054 ns/op 105 B/op 6 allocs/op
+```
diff --git a/vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md
new file mode 100644
index 00000000..9322b065
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/swag/LICENSE b/vendor/github.com/go-openapi/swag/LICENSE
new file mode 100644
index 00000000..d6456956
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/swag/README.md b/vendor/github.com/go-openapi/swag/README.md
new file mode 100644
index 00000000..a7292229
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/README.md
@@ -0,0 +1,23 @@
+# Swag [![Build Status](https://github.com/go-openapi/swag/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/swag/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/swag/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/swag)
+
+[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/swag/master/LICENSE)
+[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/swag.svg)](https://pkg.go.dev/github.com/go-openapi/swag)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/swag)](https://goreportcard.com/report/github.com/go-openapi/swag)
+
+Contains a bunch of helper functions for go-openapi and go-swagger projects.
+
+You may also use it standalone for your projects.
+
+* convert between values and pointers for builtin types
+* convert from string to builtin types (wraps strconv)
+* fast json concatenation
+* search in path
+* load from file or http
+* name mangling
+
+
+This repo has only a few dependencies outside of the standard library:
+
+* YAML utilities depend on `gopkg.in/yaml.v3`
+* `github.com/mailru/easyjson v0.7.7`
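+
+A minimal usage sketch of the value/pointer and string-conversion helpers defined in `convert_types.go` and `convert.go` below (the sample values are illustrative):
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/go-openapi/swag"
+)
+
+func main() {
+	// Value <-> pointer helpers for builtin types.
+	name := swag.String("teams")        // *string
+	fmt.Println(swag.StringValue(name)) // teams
+	fmt.Println(swag.StringValue(nil))  // empty string for a nil pointer
+
+	// String -> builtin conversions (thin wrappers around strconv).
+	n, err := swag.ConvertInt64("42")
+	if err != nil {
+		panic(err)
+	}
+	fmt.Println(n + 1) // 43
+}
+```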
diff --git a/vendor/github.com/go-openapi/swag/convert.go b/vendor/github.com/go-openapi/swag/convert.go
new file mode 100644
index 00000000..fc085aeb
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/convert.go
@@ -0,0 +1,208 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "math"
+ "strconv"
+ "strings"
+)
+
+// same as ECMA Number.MAX_SAFE_INTEGER and Number.MIN_SAFE_INTEGER
+const (
+ maxJSONFloat = float64(1<<53 - 1) // 9007199254740991.0 2^53 - 1
+ minJSONFloat = -float64(1<<53 - 1) //-9007199254740991.0 -(2^53 - 1)
+ epsilon float64 = 1e-9
+)
+
+// IsFloat64AJSONInteger allows integers in [-(2^53 - 1), 2^53 - 1] inclusive
+func IsFloat64AJSONInteger(f float64) bool {
+ if math.IsNaN(f) || math.IsInf(f, 0) || f < minJSONFloat || f > maxJSONFloat {
+ return false
+ }
+ fa := math.Abs(f)
+ g := float64(uint64(f))
+ ga := math.Abs(g)
+
+ diff := math.Abs(f - g)
+
+ // more info: https://floating-point-gui.de/errors/comparison/#look-out-for-edge-cases
+ switch {
+ case f == g: // best case
+ return true
+ case f == float64(int64(f)) || f == float64(uint64(f)): // optimistic case
+ return true
+ case f == 0 || g == 0 || diff < math.SmallestNonzeroFloat64: // very close to 0 values
+ return diff < (epsilon * math.SmallestNonzeroFloat64)
+ }
+ // check the relative error
+ return diff/math.Min(fa+ga, math.MaxFloat64) < epsilon
+}
+
+var evaluatesAsTrue map[string]struct{}
+
+func init() {
+ evaluatesAsTrue = map[string]struct{}{
+ "true": {},
+ "1": {},
+ "yes": {},
+ "ok": {},
+ "y": {},
+ "on": {},
+ "selected": {},
+ "checked": {},
+ "t": {},
+ "enabled": {},
+ }
+}
+
+// ConvertBool turns a string into a boolean
+func ConvertBool(str string) (bool, error) {
+ _, ok := evaluatesAsTrue[strings.ToLower(str)]
+ return ok, nil
+}
+
+// ConvertFloat32 turns a string into a float32
+func ConvertFloat32(str string) (float32, error) {
+ f, err := strconv.ParseFloat(str, 32)
+ if err != nil {
+ return 0, err
+ }
+ return float32(f), nil
+}
+
+// ConvertFloat64 turns a string into a float64
+func ConvertFloat64(str string) (float64, error) {
+ return strconv.ParseFloat(str, 64)
+}
+
+// ConvertInt8 turns a string into an int8
+func ConvertInt8(str string) (int8, error) {
+ i, err := strconv.ParseInt(str, 10, 8)
+ if err != nil {
+ return 0, err
+ }
+ return int8(i), nil
+}
+
+// ConvertInt16 turns a string into an int16
+func ConvertInt16(str string) (int16, error) {
+ i, err := strconv.ParseInt(str, 10, 16)
+ if err != nil {
+ return 0, err
+ }
+ return int16(i), nil
+}
+
+// ConvertInt32 turns a string into an int32
+func ConvertInt32(str string) (int32, error) {
+ i, err := strconv.ParseInt(str, 10, 32)
+ if err != nil {
+ return 0, err
+ }
+ return int32(i), nil
+}
+
+// ConvertInt64 turns a string into an int64
+func ConvertInt64(str string) (int64, error) {
+ return strconv.ParseInt(str, 10, 64)
+}
+
+// ConvertUint8 turns a string into a uint8
+func ConvertUint8(str string) (uint8, error) {
+ i, err := strconv.ParseUint(str, 10, 8)
+ if err != nil {
+ return 0, err
+ }
+ return uint8(i), nil
+}
+
+// ConvertUint16 turns a string into a uint16
+func ConvertUint16(str string) (uint16, error) {
+ i, err := strconv.ParseUint(str, 10, 16)
+ if err != nil {
+ return 0, err
+ }
+ return uint16(i), nil
+}
+
+// ConvertUint32 turns a string into a uint32
+func ConvertUint32(str string) (uint32, error) {
+ i, err := strconv.ParseUint(str, 10, 32)
+ if err != nil {
+ return 0, err
+ }
+ return uint32(i), nil
+}
+
+// ConvertUint64 turns a string into a uint64
+func ConvertUint64(str string) (uint64, error) {
+ return strconv.ParseUint(str, 10, 64)
+}
+
+// FormatBool turns a boolean into a string
+func FormatBool(value bool) string {
+ return strconv.FormatBool(value)
+}
+
+// FormatFloat32 turns a float32 into a string
+func FormatFloat32(value float32) string {
+ return strconv.FormatFloat(float64(value), 'f', -1, 32)
+}
+
+// FormatFloat64 turns a float64 into a string
+func FormatFloat64(value float64) string {
+ return strconv.FormatFloat(value, 'f', -1, 64)
+}
+
+// FormatInt8 turns an int8 into a string
+func FormatInt8(value int8) string {
+ return strconv.FormatInt(int64(value), 10)
+}
+
+// FormatInt16 turns an int16 into a string
+func FormatInt16(value int16) string {
+ return strconv.FormatInt(int64(value), 10)
+}
+
+// FormatInt32 turns an int32 into a string
+func FormatInt32(value int32) string {
+ return strconv.Itoa(int(value))
+}
+
+// FormatInt64 turns an int64 into a string
+func FormatInt64(value int64) string {
+ return strconv.FormatInt(value, 10)
+}
+
+// FormatUint8 turns a uint8 into a string
+func FormatUint8(value uint8) string {
+ return strconv.FormatUint(uint64(value), 10)
+}
+
+// FormatUint16 turns a uint16 into a string
+func FormatUint16(value uint16) string {
+ return strconv.FormatUint(uint64(value), 10)
+}
+
+// FormatUint32 turns a uint32 into a string
+func FormatUint32(value uint32) string {
+ return strconv.FormatUint(uint64(value), 10)
+}
+
+// FormatUint64 turns a uint64 into a string
+func FormatUint64(value uint64) string {
+ return strconv.FormatUint(value, 10)
+}
diff --git a/vendor/github.com/go-openapi/swag/convert_types.go b/vendor/github.com/go-openapi/swag/convert_types.go
new file mode 100644
index 00000000..c49cc473
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/convert_types.go
@@ -0,0 +1,730 @@
+package swag
+
+import "time"
+
+// This file was taken from the AWS Go SDK
+
+// String returns a pointer to the string value passed in.
+func String(v string) *string {
+ return &v
+}
+
+// StringValue returns the value of the string pointer passed in or
+// "" if the pointer is nil.
+func StringValue(v *string) string {
+ if v != nil {
+ return *v
+ }
+ return ""
+}
+
+// StringSlice converts a slice of string values into a slice of
+// string pointers
+func StringSlice(src []string) []*string {
+ dst := make([]*string, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// StringValueSlice converts a slice of string pointers into a slice of
+// string values
+func StringValueSlice(src []*string) []string {
+ dst := make([]string, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// StringMap converts a string map of string values into a string
+// map of string pointers
+func StringMap(src map[string]string) map[string]*string {
+ dst := make(map[string]*string)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// StringValueMap converts a string map of string pointers into a string
+// map of string values
+func StringValueMap(src map[string]*string) map[string]string {
+ dst := make(map[string]string)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Bool returns a pointer to the bool value passed in.
+func Bool(v bool) *bool {
+ return &v
+}
+
+// BoolValue returns the value of the bool pointer passed in or
+// false if the pointer is nil.
+func BoolValue(v *bool) bool {
+ if v != nil {
+ return *v
+ }
+ return false
+}
+
+// BoolSlice converts a slice of bool values into a slice of
+// bool pointers
+func BoolSlice(src []bool) []*bool {
+ dst := make([]*bool, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// BoolValueSlice converts a slice of bool pointers into a slice of
+// bool values
+func BoolValueSlice(src []*bool) []bool {
+ dst := make([]bool, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// BoolMap converts a string map of bool values into a string
+// map of bool pointers
+func BoolMap(src map[string]bool) map[string]*bool {
+ dst := make(map[string]*bool)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// BoolValueMap converts a string map of bool pointers into a string
+// map of bool values
+func BoolValueMap(src map[string]*bool) map[string]bool {
+ dst := make(map[string]bool)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Int returns a pointer to the int value passed in.
+func Int(v int) *int {
+ return &v
+}
+
+// IntValue returns the value of the int pointer passed in or
+// 0 if the pointer is nil.
+func IntValue(v *int) int {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// IntSlice converts a slice of int values into a slice of
+// int pointers
+func IntSlice(src []int) []*int {
+ dst := make([]*int, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// IntValueSlice converts a slice of int pointers into a slice of
+// int values
+func IntValueSlice(src []*int) []int {
+ dst := make([]int, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// IntMap converts a string map of int values into a string
+// map of int pointers
+func IntMap(src map[string]int) map[string]*int {
+ dst := make(map[string]*int)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// IntValueMap converts a string map of int pointers into a string
+// map of int values
+func IntValueMap(src map[string]*int) map[string]int {
+ dst := make(map[string]int)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Int32 returns a pointer to the int32 value passed in.
+func Int32(v int32) *int32 {
+ return &v
+}
+
+// Int32Value returns the value of the int32 pointer passed in or
+// 0 if the pointer is nil.
+func Int32Value(v *int32) int32 {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// Int32Slice converts a slice of int32 values into a slice of
+// int32 pointers
+func Int32Slice(src []int32) []*int32 {
+ dst := make([]*int32, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// Int32ValueSlice converts a slice of int32 pointers into a slice of
+// int32 values
+func Int32ValueSlice(src []*int32) []int32 {
+ dst := make([]int32, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// Int32Map converts a string map of int32 values into a string
+// map of int32 pointers
+func Int32Map(src map[string]int32) map[string]*int32 {
+ dst := make(map[string]*int32)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// Int32ValueMap converts a string map of int32 pointers into a string
+// map of int32 values
+func Int32ValueMap(src map[string]*int32) map[string]int32 {
+ dst := make(map[string]int32)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Int64 returns a pointer to the int64 value passed in.
+func Int64(v int64) *int64 {
+ return &v
+}
+
+// Int64Value returns the value of the int64 pointer passed in or
+// 0 if the pointer is nil.
+func Int64Value(v *int64) int64 {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// Int64Slice converts a slice of int64 values into a slice of
+// int64 pointers
+func Int64Slice(src []int64) []*int64 {
+ dst := make([]*int64, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// Int64ValueSlice converts a slice of int64 pointers into a slice of
+// int64 values
+func Int64ValueSlice(src []*int64) []int64 {
+ dst := make([]int64, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// Int64Map converts a string map of int64 values into a string
+// map of int64 pointers
+func Int64Map(src map[string]int64) map[string]*int64 {
+ dst := make(map[string]*int64)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// Int64ValueMap converts a string map of int64 pointers into a string
+// map of int64 values
+func Int64ValueMap(src map[string]*int64) map[string]int64 {
+ dst := make(map[string]int64)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Uint16 returns a pointer to the uint16 value passed in.
+func Uint16(v uint16) *uint16 {
+ return &v
+}
+
+// Uint16Value returns the value of the uint16 pointer passed in or
+// 0 if the pointer is nil.
+func Uint16Value(v *uint16) uint16 {
+ if v != nil {
+ return *v
+ }
+
+ return 0
+}
+
+// Uint16Slice converts a slice of uint16 values into a slice of
+// uint16 pointers
+func Uint16Slice(src []uint16) []*uint16 {
+ dst := make([]*uint16, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+
+ return dst
+}
+
+// Uint16ValueSlice converts a slice of uint16 pointers into a slice of
+// uint16 values
+func Uint16ValueSlice(src []*uint16) []uint16 {
+ dst := make([]uint16, len(src))
+
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+
+ return dst
+}
+
+// Uint16Map converts a string map of uint16 values into a string
+// map of uint16 pointers
+func Uint16Map(src map[string]uint16) map[string]*uint16 {
+ dst := make(map[string]*uint16)
+
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+
+ return dst
+}
+
+// Uint16ValueMap converts a string map of uint16 pointers into a string
+// map of uint16 values
+func Uint16ValueMap(src map[string]*uint16) map[string]uint16 {
+ dst := make(map[string]uint16)
+
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+
+ return dst
+}
+
+// Uint returns a pointer to the uint value passed in.
+func Uint(v uint) *uint {
+ return &v
+}
+
+// UintValue returns the value of the uint pointer passed in or
+// 0 if the pointer is nil.
+func UintValue(v *uint) uint {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// UintSlice converts a slice of uint values into a slice of
+// uint pointers
+func UintSlice(src []uint) []*uint {
+ dst := make([]*uint, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// UintValueSlice converts a slice of uint pointers into a slice of
+// uint values
+func UintValueSlice(src []*uint) []uint {
+ dst := make([]uint, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// UintMap converts a string map of uint values into a string
+// map of uint pointers
+func UintMap(src map[string]uint) map[string]*uint {
+ dst := make(map[string]*uint)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// UintValueMap converts a string map of uint pointers into a string
+// map of uint values
+func UintValueMap(src map[string]*uint) map[string]uint {
+ dst := make(map[string]uint)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Uint32 returns a pointer to the uint32 value passed in.
+func Uint32(v uint32) *uint32 {
+ return &v
+}
+
+// Uint32Value returns the value of the uint32 pointer passed in or
+// 0 if the pointer is nil.
+func Uint32Value(v *uint32) uint32 {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// Uint32Slice converts a slice of uint32 values into a slice of
+// uint32 pointers
+func Uint32Slice(src []uint32) []*uint32 {
+ dst := make([]*uint32, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// Uint32ValueSlice converts a slice of uint32 pointers into a slice of
+// uint32 values
+func Uint32ValueSlice(src []*uint32) []uint32 {
+ dst := make([]uint32, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// Uint32Map converts a string map of uint32 values into a string
+// map of uint32 pointers
+func Uint32Map(src map[string]uint32) map[string]*uint32 {
+ dst := make(map[string]*uint32)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// Uint32ValueMap converts a string map of uint32 pointers into a string
+// map of uint32 values
+func Uint32ValueMap(src map[string]*uint32) map[string]uint32 {
+ dst := make(map[string]uint32)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Uint64 returns a pointer to the uint64 value passed in.
+func Uint64(v uint64) *uint64 {
+ return &v
+}
+
+// Uint64Value returns the value of the uint64 pointer passed in or
+// 0 if the pointer is nil.
+func Uint64Value(v *uint64) uint64 {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// Uint64Slice converts a slice of uint64 values into a slice of
+// uint64 pointers
+func Uint64Slice(src []uint64) []*uint64 {
+ dst := make([]*uint64, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// Uint64ValueSlice converts a slice of uint64 pointers into a slice of
+// uint64 values
+func Uint64ValueSlice(src []*uint64) []uint64 {
+ dst := make([]uint64, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// Uint64Map converts a string map of uint64 values into a string
+// map of uint64 pointers
+func Uint64Map(src map[string]uint64) map[string]*uint64 {
+ dst := make(map[string]*uint64)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// Uint64ValueMap converts a string map of uint64 pointers into a string
+// map of uint64 values
+func Uint64ValueMap(src map[string]*uint64) map[string]uint64 {
+ dst := make(map[string]uint64)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Float32 returns a pointer to the float32 value passed in.
+func Float32(v float32) *float32 {
+ return &v
+}
+
+// Float32Value returns the value of the float32 pointer passed in or
+// 0 if the pointer is nil.
+func Float32Value(v *float32) float32 {
+ if v != nil {
+ return *v
+ }
+
+ return 0
+}
+
+// Float32Slice converts a slice of float32 values into a slice of
+// float32 pointers
+func Float32Slice(src []float32) []*float32 {
+ dst := make([]*float32, len(src))
+
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+
+ return dst
+}
+
+// Float32ValueSlice converts a slice of float32 pointers into a slice of
+// float32 values
+func Float32ValueSlice(src []*float32) []float32 {
+ dst := make([]float32, len(src))
+
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+
+ return dst
+}
+
+// Float32Map converts a string map of float32 values into a string
+// map of float32 pointers
+func Float32Map(src map[string]float32) map[string]*float32 {
+ dst := make(map[string]*float32)
+
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+
+ return dst
+}
+
+// Float32ValueMap converts a string map of float32 pointers into a string
+// map of float32 values
+func Float32ValueMap(src map[string]*float32) map[string]float32 {
+ dst := make(map[string]float32)
+
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+
+ return dst
+}
+
+// Float64 returns a pointer to the float64 value passed in.
+func Float64(v float64) *float64 {
+ return &v
+}
+
+// Float64Value returns the value of the float64 pointer passed in or
+// 0 if the pointer is nil.
+func Float64Value(v *float64) float64 {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// Float64Slice converts a slice of float64 values into a slice of
+// float64 pointers
+func Float64Slice(src []float64) []*float64 {
+ dst := make([]*float64, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// Float64ValueSlice converts a slice of float64 pointers into a slice of
+// float64 values
+func Float64ValueSlice(src []*float64) []float64 {
+ dst := make([]float64, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// Float64Map converts a string map of float64 values into a string
+// map of float64 pointers
+func Float64Map(src map[string]float64) map[string]*float64 {
+ dst := make(map[string]*float64)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// Float64ValueMap converts a string map of float64 pointers into a string
+// map of float64 values
+func Float64ValueMap(src map[string]*float64) map[string]float64 {
+ dst := make(map[string]float64)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Time returns a pointer to the time.Time value passed in.
+func Time(v time.Time) *time.Time {
+ return &v
+}
+
+// TimeValue returns the value of the time.Time pointer passed in or
+// time.Time{} if the pointer is nil.
+func TimeValue(v *time.Time) time.Time {
+ if v != nil {
+ return *v
+ }
+ return time.Time{}
+}
+
+// TimeSlice converts a slice of time.Time values into a slice of
+// time.Time pointers
+func TimeSlice(src []time.Time) []*time.Time {
+ dst := make([]*time.Time, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// TimeValueSlice converts a slice of time.Time pointers into a slice of
+// time.Time values
+func TimeValueSlice(src []*time.Time) []time.Time {
+ dst := make([]time.Time, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// TimeMap converts a string map of time.Time values into a string
+// map of time.Time pointers
+func TimeMap(src map[string]time.Time) map[string]*time.Time {
+ dst := make(map[string]*time.Time)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// TimeValueMap converts a string map of time.Time pointers into a string
+// map of time.Time values
+func TimeValueMap(src map[string]*time.Time) map[string]time.Time {
+ dst := make(map[string]time.Time)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
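+
+// exampleConvertTypes is an illustrative sketch added for documentation; it is
+// not part of the upstream library. It shows how the pointer/value helpers
+// above are typically used: nil pointers fall back to the type's zero value.
+func exampleConvertTypes() {
+ s := String("teams")                                   // *string
+ _ = StringValue(s)                                     // "teams"
+ _ = StringValue(nil)                                   // "" for a nil pointer
+ _ = Int64ValueSlice([]*int64{Int64(1), nil, Int64(3)}) // nil entries become 0
+}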
diff --git a/vendor/github.com/go-openapi/swag/doc.go b/vendor/github.com/go-openapi/swag/doc.go
new file mode 100644
index 00000000..55094cb7
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/doc.go
@@ -0,0 +1,31 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package swag contains a bunch of helper functions for go-openapi and go-swagger projects.
+
+You may also use it standalone for your projects.
+
+ - convert between value and pointers for builtin types
+ - convert from string to builtin types (wraps strconv)
+ - fast json concatenation
+ - search in path
+ - load from file or http
+ - name mangling
+
+This repo has only a few dependencies outside of the standard library:
+
+ - YAML utilities depend on gopkg.in/yaml.v2
+*/
+package swag
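+
+// An illustrative sketch of the fast JSON helpers (added for documentation,
+// not part of the upstream package docs):
+//
+//  b, err := swag.WriteJSON(map[string]interface{}{"key": "team-1"})
+//  if err == nil {
+//      var out map[string]interface{}
+//      err = swag.ReadJSON(b, &out)
+//  }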
diff --git a/vendor/github.com/go-openapi/swag/file.go b/vendor/github.com/go-openapi/swag/file.go
new file mode 100644
index 00000000..16accc55
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/file.go
@@ -0,0 +1,33 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import "mime/multipart"
+
+// File represents an uploaded file.
+type File struct {
+ Data multipart.File
+ Header *multipart.FileHeader
+}
+
+// Read bytes from the file
+func (f *File) Read(p []byte) (n int, err error) {
+ return f.Data.Read(p)
+}
+
+// Close the file
+func (f *File) Close() error {
+ return f.Data.Close()
+}
diff --git a/vendor/github.com/go-openapi/swag/initialism_index.go b/vendor/github.com/go-openapi/swag/initialism_index.go
new file mode 100644
index 00000000..2b2e4631
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/initialism_index.go
@@ -0,0 +1,202 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "sort"
+ "strings"
+ "sync"
+)
+
+var (
+ // commonInitialisms are common acronyms that are kept as whole uppercased words.
+ commonInitialisms *indexOfInitialisms
+
+ // initialisms is a slice of sorted initialisms
+ initialisms []string
+
+ // a copy of initialisms pre-baked as []rune
+ initialismsRunes [][]rune
+ initialismsUpperCased [][]rune
+
+ isInitialism func(string) bool
+
+ maxAllocMatches int
+)
+
+func init() {
+ // Taken from https://github.com/golang/lint/blob/3390df4df2787994aea98de825b964ac7944b817/lint.go#L732-L769
+ configuredInitialisms := map[string]bool{
+ "ACL": true,
+ "API": true,
+ "ASCII": true,
+ "CPU": true,
+ "CSS": true,
+ "DNS": true,
+ "EOF": true,
+ "GUID": true,
+ "HTML": true,
+ "HTTPS": true,
+ "HTTP": true,
+ "ID": true,
+ "IP": true,
+ "IPv4": true,
+ "IPv6": true,
+ "JSON": true,
+ "LHS": true,
+ "OAI": true,
+ "QPS": true,
+ "RAM": true,
+ "RHS": true,
+ "RPC": true,
+ "SLA": true,
+ "SMTP": true,
+ "SQL": true,
+ "SSH": true,
+ "TCP": true,
+ "TLS": true,
+ "TTL": true,
+ "UDP": true,
+ "UI": true,
+ "UID": true,
+ "UUID": true,
+ "URI": true,
+ "URL": true,
+ "UTF8": true,
+ "VM": true,
+ "XML": true,
+ "XMPP": true,
+ "XSRF": true,
+ "XSS": true,
+ }
+
+ // a thread-safe index of initialisms
+ commonInitialisms = newIndexOfInitialisms().load(configuredInitialisms)
+ initialisms = commonInitialisms.sorted()
+ initialismsRunes = asRunes(initialisms)
+ initialismsUpperCased = asUpperCased(initialisms)
+ maxAllocMatches = maxAllocHeuristic(initialismsRunes)
+
+ // a test function
+ isInitialism = commonInitialisms.isInitialism
+}
+
+func asRunes(in []string) [][]rune {
+ out := make([][]rune, len(in))
+ for i, initialism := range in {
+ out[i] = []rune(initialism)
+ }
+
+ return out
+}
+
+func asUpperCased(in []string) [][]rune {
+ out := make([][]rune, len(in))
+
+ for i, initialism := range in {
+ out[i] = []rune(upper(trim(initialism)))
+ }
+
+ return out
+}
+
+func maxAllocHeuristic(in [][]rune) int {
+ heuristic := make(map[rune]int)
+ for _, initialism := range in {
+ heuristic[initialism[0]]++
+ }
+
+ var maxAlloc int
+ for _, val := range heuristic {
+ if val > maxAlloc {
+ maxAlloc = val
+ }
+ }
+
+ return maxAlloc
+}
+
+// AddInitialisms adds additional initialisms
+func AddInitialisms(words ...string) {
+ for _, word := range words {
+ // commonInitialisms[upper(word)] = true
+ commonInitialisms.add(upper(word))
+ }
+ // sort again
+ initialisms = commonInitialisms.sorted()
+ initialismsRunes = asRunes(initialisms)
+ initialismsUpperCased = asUpperCased(initialisms)
+}
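+
+// exampleAddInitialisms is an illustrative sketch (not part of the upstream
+// file). "SSO" is a made-up initialism: once registered, the name-mangling
+// helpers should keep it fully upper-cased, e.g. ToGoName("sso_login") should
+// yield "SSOLogin".
+func exampleAddInitialisms() {
+ AddInitialisms("SSO")
+}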
+
+// indexOfInitialisms is a thread-safe implementation of the sorted index of initialisms.
+// Since go1.9, this may be implemented with sync.Map.
+type indexOfInitialisms struct {
+ sortMutex *sync.Mutex
+ index *sync.Map
+}
+
+func newIndexOfInitialisms() *indexOfInitialisms {
+ return &indexOfInitialisms{
+ sortMutex: new(sync.Mutex),
+ index: new(sync.Map),
+ }
+}
+
+func (m *indexOfInitialisms) load(initial map[string]bool) *indexOfInitialisms {
+ m.sortMutex.Lock()
+ defer m.sortMutex.Unlock()
+ for k, v := range initial {
+ m.index.Store(k, v)
+ }
+ return m
+}
+
+func (m *indexOfInitialisms) isInitialism(key string) bool {
+ _, ok := m.index.Load(key)
+ return ok
+}
+
+func (m *indexOfInitialisms) add(key string) *indexOfInitialisms {
+ m.index.Store(key, true)
+ return m
+}
+
+func (m *indexOfInitialisms) sorted() (result []string) {
+ m.sortMutex.Lock()
+ defer m.sortMutex.Unlock()
+ m.index.Range(func(key, value interface{}) bool {
+ k := key.(string)
+ result = append(result, k)
+ return true
+ })
+ sort.Sort(sort.Reverse(byInitialism(result)))
+ return
+}
+
+type byInitialism []string
+
+func (s byInitialism) Len() int {
+ return len(s)
+}
+func (s byInitialism) Swap(i, j int) {
+ s[i], s[j] = s[j], s[i]
+}
+func (s byInitialism) Less(i, j int) bool {
+ if len(s[i]) != len(s[j]) {
+ return len(s[i]) < len(s[j])
+ }
+
+ return strings.Compare(s[i], s[j]) > 0
+}
diff --git a/vendor/github.com/go-openapi/swag/json.go b/vendor/github.com/go-openapi/swag/json.go
new file mode 100644
index 00000000..7e9902ca
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/json.go
@@ -0,0 +1,312 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "bytes"
+ "encoding/json"
+ "log"
+ "reflect"
+ "strings"
+ "sync"
+
+ "github.com/mailru/easyjson/jlexer"
+ "github.com/mailru/easyjson/jwriter"
+)
+
+// nullJSON represents a JSON object with null type
+var nullJSON = []byte("null")
+
+// DefaultJSONNameProvider the default cache for types
+var DefaultJSONNameProvider = NewNameProvider()
+
+const comma = byte(',')
+
+var closers map[byte]byte
+
+func init() {
+ closers = map[byte]byte{
+ '{': '}',
+ '[': ']',
+ }
+}
+
+type ejMarshaler interface {
+ MarshalEasyJSON(w *jwriter.Writer)
+}
+
+type ejUnmarshaler interface {
+ UnmarshalEasyJSON(w *jlexer.Lexer)
+}
+
+// WriteJSON writes json data, prefers finding an appropriate interface to short-circuit the marshaler
+// so it takes the fastest option available.
+func WriteJSON(data interface{}) ([]byte, error) {
+ if d, ok := data.(ejMarshaler); ok {
+ jw := new(jwriter.Writer)
+ d.MarshalEasyJSON(jw)
+ return jw.BuildBytes()
+ }
+ if d, ok := data.(json.Marshaler); ok {
+ return d.MarshalJSON()
+ }
+ return json.Marshal(data)
+}
+
+// ReadJSON reads json data, prefers finding an appropriate interface to short-circuit the unmarshaler
+// so it takes the fastest option available
+func ReadJSON(data []byte, value interface{}) error {
+ trimmedData := bytes.Trim(data, "\x00")
+ if d, ok := value.(ejUnmarshaler); ok {
+ jl := &jlexer.Lexer{Data: trimmedData}
+ d.UnmarshalEasyJSON(jl)
+ return jl.Error()
+ }
+ if d, ok := value.(json.Unmarshaler); ok {
+ return d.UnmarshalJSON(trimmedData)
+ }
+ return json.Unmarshal(trimmedData, value)
+}
+
+// DynamicJSONToStruct converts an untyped json structure into a struct
+func DynamicJSONToStruct(data interface{}, target interface{}) error {
+ // TODO: convert straight to a json typed map (mergo + iterate?)
+ b, err := WriteJSON(data)
+ if err != nil {
+ return err
+ }
+ return ReadJSON(b, target)
+}
+
+// ConcatJSON concatenates multiple json objects efficiently
+func ConcatJSON(blobs ...[]byte) []byte {
+ if len(blobs) == 0 {
+ return nil
+ }
+
+ last := len(blobs) - 1
+ for blobs[last] == nil || bytes.Equal(blobs[last], nullJSON) {
+ // strips trailing null objects
+ last--
+ if last < 0 {
+ // there was nothing but "null"s or nil...
+ return nil
+ }
+ }
+ if last == 0 {
+ return blobs[0]
+ }
+
+ var opening, closing byte
+ var idx, a int
+ buf := bytes.NewBuffer(nil)
+
+ for i, b := range blobs[:last+1] {
+ if b == nil || bytes.Equal(b, nullJSON) {
+ // a null object is in the list: skip it
+ continue
+ }
+ if len(b) > 0 && opening == 0 { // is this an array or an object?
+ opening, closing = b[0], closers[b[0]]
+ }
+
+ if opening != '{' && opening != '[' {
+ continue // don't know how to concatenate non container objects
+ }
+
+ if len(b) < 3 { // yep empty but also the last one, so closing this thing
+ if i == last && a > 0 {
+ if err := buf.WriteByte(closing); err != nil {
+ log.Println(err)
+ }
+ }
+ continue
+ }
+
+ idx = 0
+ if a > 0 { // we need to join with a comma for everything beyond the first non-empty item
+ if err := buf.WriteByte(comma); err != nil {
+ log.Println(err)
+ }
+ idx = 1 // this is not the first or the last so we want to drop the leading bracket
+ }
+
+ if i != last { // not the last one, strip brackets
+ if _, err := buf.Write(b[idx : len(b)-1]); err != nil {
+ log.Println(err)
+ }
+ } else { // last one, strip only the leading bracket
+ if _, err := buf.Write(b[idx:]); err != nil {
+ log.Println(err)
+ }
+ }
+ a++
+ }
+ // somehow it ended up being empty, so provide a default value
+ if buf.Len() == 0 {
+ if err := buf.WriteByte(opening); err != nil {
+ log.Println(err)
+ }
+ if err := buf.WriteByte(closing); err != nil {
+ log.Println(err)
+ }
+ }
+ return buf.Bytes()
+}
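+
+// exampleConcatJSON is an illustrative sketch (not part of the upstream file):
+// ConcatJSON merges the members of several JSON objects (or arrays) into one
+// document, skipping nil and "null" blobs.
+func exampleConcatJSON() []byte {
+ // yields {"a":1,"b":2}
+ return ConcatJSON([]byte(`{"a":1}`), []byte(`{"b":2}`))
+}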
+
+// ToDynamicJSON turns an object into a properly JSON typed structure
+func ToDynamicJSON(data interface{}) interface{} {
+ // TODO: convert straight to a json typed map (mergo + iterate?)
+ b, err := json.Marshal(data)
+ if err != nil {
+ log.Println(err)
+ }
+ var res interface{}
+ if err := json.Unmarshal(b, &res); err != nil {
+ log.Println(err)
+ }
+ return res
+}
+
+// FromDynamicJSON turns an object into a properly JSON typed structure and decodes the result into target
+func FromDynamicJSON(data, target interface{}) error {
+ b, err := json.Marshal(data)
+ if err != nil {
+ log.Println(err)
+ }
+ return json.Unmarshal(b, target)
+}
+
+// NameProvider represents an object capable of translating from go property names
+// to json property names.
+// This type is thread-safe.
+type NameProvider struct {
+ lock *sync.Mutex
+ index map[reflect.Type]nameIndex
+}
+
+type nameIndex struct {
+ jsonNames map[string]string
+ goNames map[string]string
+}
+
+// NewNameProvider creates a new name provider
+func NewNameProvider() *NameProvider {
+ return &NameProvider{
+ lock: &sync.Mutex{},
+ index: make(map[reflect.Type]nameIndex),
+ }
+}
+
+func buildnameIndex(tpe reflect.Type, idx, reverseIdx map[string]string) {
+ for i := 0; i < tpe.NumField(); i++ {
+ targetDes := tpe.Field(i)
+
+ if targetDes.PkgPath != "" { // unexported
+ continue
+ }
+
+ if targetDes.Anonymous { // walk embedded structures tree down first
+ buildnameIndex(targetDes.Type, idx, reverseIdx)
+ continue
+ }
+
+ if tag := targetDes.Tag.Get("json"); tag != "" {
+
+ parts := strings.Split(tag, ",")
+ if len(parts) == 0 {
+ continue
+ }
+
+ nm := parts[0]
+ if nm == "-" {
+ continue
+ }
+ if nm == "" { // empty string means we want to use the Go name
+ nm = targetDes.Name
+ }
+
+ idx[nm] = targetDes.Name
+ reverseIdx[targetDes.Name] = nm
+ }
+ }
+}
+
+func newNameIndex(tpe reflect.Type) nameIndex {
+ var idx = make(map[string]string, tpe.NumField())
+ var reverseIdx = make(map[string]string, tpe.NumField())
+
+ buildnameIndex(tpe, idx, reverseIdx)
+ return nameIndex{jsonNames: idx, goNames: reverseIdx}
+}
+
+// GetJSONNames gets all the json property names for a type
+func (n *NameProvider) GetJSONNames(subject interface{}) []string {
+ n.lock.Lock()
+ defer n.lock.Unlock()
+ tpe := reflect.Indirect(reflect.ValueOf(subject)).Type()
+ names, ok := n.index[tpe]
+ if !ok {
+ names = n.makeNameIndex(tpe)
+ }
+
+ res := make([]string, 0, len(names.jsonNames))
+ for k := range names.jsonNames {
+ res = append(res, k)
+ }
+ return res
+}
+
+// GetJSONName gets the json name for a go property name
+func (n *NameProvider) GetJSONName(subject interface{}, name string) (string, bool) {
+ tpe := reflect.Indirect(reflect.ValueOf(subject)).Type()
+ return n.GetJSONNameForType(tpe, name)
+}
+
+// GetJSONNameForType gets the json name for a go property name on a given type
+func (n *NameProvider) GetJSONNameForType(tpe reflect.Type, name string) (string, bool) {
+ n.lock.Lock()
+ defer n.lock.Unlock()
+ names, ok := n.index[tpe]
+ if !ok {
+ names = n.makeNameIndex(tpe)
+ }
+ nme, ok := names.goNames[name]
+ return nme, ok
+}
+
+func (n *NameProvider) makeNameIndex(tpe reflect.Type) nameIndex {
+ names := newNameIndex(tpe)
+ n.index[tpe] = names
+ return names
+}
+
+// GetGoName gets the go name for a json property name
+func (n *NameProvider) GetGoName(subject interface{}, name string) (string, bool) {
+ tpe := reflect.Indirect(reflect.ValueOf(subject)).Type()
+ return n.GetGoNameForType(tpe, name)
+}
+
+// GetGoNameForType gets the go name for a given type for a json property name
+func (n *NameProvider) GetGoNameForType(tpe reflect.Type, name string) (string, bool) {
+ n.lock.Lock()
+ defer n.lock.Unlock()
+ names, ok := n.index[tpe]
+ if !ok {
+ names = n.makeNameIndex(tpe)
+ }
+ nme, ok := names.jsonNames[name]
+ return nme, ok
+}
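+
+// exampleNameProvider is an illustrative sketch (not part of the upstream
+// file): a NameProvider resolves JSON property names from Go field names and
+// vice versa, based on the struct's json tags. The team struct is made up.
+func exampleNameProvider() {
+ type team struct {
+ Key string `json:"key"`
+ }
+ np := NewNameProvider()
+ _, _ = np.GetJSONName(team{}, "Key") // "key", true
+ _, _ = np.GetGoName(team{}, "key")   // "Key", true
+}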
diff --git a/vendor/github.com/go-openapi/swag/loading.go b/vendor/github.com/go-openapi/swag/loading.go
new file mode 100644
index 00000000..783442fd
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/loading.go
@@ -0,0 +1,176 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "fmt"
+ "io"
+ "log"
+ "net/http"
+ "net/url"
+ "os"
+ "path"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "time"
+)
+
+// LoadHTTPTimeout the default timeout for load requests
+var LoadHTTPTimeout = 30 * time.Second
+
+// LoadHTTPBasicAuthUsername the username to use when load requests require basic auth
+var LoadHTTPBasicAuthUsername = ""
+
+// LoadHTTPBasicAuthPassword the password to use when load requests require basic auth
+var LoadHTTPBasicAuthPassword = ""
+
+// LoadHTTPCustomHeaders an optional collection of custom HTTP headers for load requests
+var LoadHTTPCustomHeaders = map[string]string{}
+
+// LoadFromFileOrHTTP loads the bytes from a file or a remote http server based on the path passed in
+func LoadFromFileOrHTTP(pth string) ([]byte, error) {
+ return LoadStrategy(pth, os.ReadFile, loadHTTPBytes(LoadHTTPTimeout))(pth)
+}
+
+// LoadFromFileOrHTTPWithTimeout loads the bytes from a file or a remote http server based on the path passed in
+// timeout arg allows for per request overriding of the request timeout
+func LoadFromFileOrHTTPWithTimeout(pth string, timeout time.Duration) ([]byte, error) {
+ return LoadStrategy(pth, os.ReadFile, loadHTTPBytes(timeout))(pth)
+}
+
+// LoadStrategy returns a loader function for a given path or URI.
+//
+// The load strategy returns the remote load for any path starting with `http`.
+// So this works for any URI with a scheme `http` or `https`.
+//
+// The fallback strategy is to call the local loader.
+//
+// The local loader takes a local file system path (absolute or relative) as argument,
+// or alternatively a `file://...` URI, **without host** (see also below for windows).
+//
+// There are a few liberalities, initially intended to be tolerant regarding the URI syntax,
+// especially on windows.
+//
+// Before the local loader is called, the given path is transformed:
+// - percent-encoded characters are unescaped
+// - simple paths (e.g. `./folder/file`) are passed as-is
+// - on windows, occurrences of `/` are replaced by `\`, so providing a relative path such as `folder/file` works too.
+//
+// For paths provided as URIs with the "file" scheme, please note that:
+// - `file://` is simply stripped.
+// This means that the host part of the URI is not parsed at all.
+// For example, `file:///folder/file` becomes `/folder/file`,
+// but `file://localhost/folder/file` becomes `localhost/folder/file` on unix systems.
+// Similarly, `file://./folder/file` yields `./folder/file`.
+// - on windows, `file://...` can take a host so as to specify an UNC share location.
+//
+// Reminder about windows-specifics:
+// - `file://host/folder/file` becomes an UNC path like `\\host\folder\file` (no port specification is supported)
+// - `file:///c:/folder/file` becomes `C:\folder\file`
+// - `file://c:/folder/file` is tolerated (without leading `/`) and becomes `c:\folder\file`
+func LoadStrategy(pth string, local, remote func(string) ([]byte, error)) func(string) ([]byte, error) {
+ if strings.HasPrefix(pth, "http") {
+ return remote
+ }
+
+ return func(p string) ([]byte, error) {
+ upth, err := url.PathUnescape(p)
+ if err != nil {
+ return nil, err
+ }
+
+ if !strings.HasPrefix(p, `file://`) {
+ // regular file path provided: just normalize slashes
+ return local(filepath.FromSlash(upth))
+ }
+
+ if runtime.GOOS != "windows" {
+ // crude processing: this leaves full URIs with a host with a (mostly) unexpected result
+ upth = strings.TrimPrefix(upth, `file://`)
+
+ return local(filepath.FromSlash(upth))
+ }
+
+ // windows-only pre-processing of file://... URIs
+
+ // support for canonical file URIs on windows.
+ u, err := url.Parse(filepath.ToSlash(upth))
+ if err != nil {
+ return nil, err
+ }
+
+ if u.Host != "" {
+ // assume UNC name (volume share)
+ // NOTE: UNC port not yet supported
+
+ // when the "host" segment is a drive letter:
+ // file://C:/folder/... => C:\folder
+ upth = path.Clean(strings.Join([]string{u.Host, u.Path}, `/`))
+ if !strings.HasSuffix(u.Host, ":") && u.Host[0] != '.' {
+ // tolerance: if we have a leading dot, this can't be a host
+ // file://host/share/folder\... ==> \\host\share\path\folder
+ upth = "//" + upth
+ }
+ } else {
+ // no host, let's figure out if this is a drive letter
+ upth = strings.TrimPrefix(upth, `file://`)
+ first, _, _ := strings.Cut(strings.TrimPrefix(u.Path, "/"), "/")
+ if strings.HasSuffix(first, ":") {
+ // drive letter in the first segment:
+ // file:///c:/folder/... ==> strip the leading slash
+ upth = strings.TrimPrefix(upth, `/`)
+ }
+ }
+
+ return local(filepath.FromSlash(upth))
+ }
+}
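+
+// exampleLoadStrategy is an illustrative sketch (not part of the upstream
+// file). The spec path is hypothetical; a path starting with "http" would be
+// fetched with loadHTTPBytes instead of read from disk, as described above.
+func exampleLoadStrategy() ([]byte, error) {
+ load := LoadStrategy("./spec/openapi.yaml", os.ReadFile, loadHTTPBytes(LoadHTTPTimeout))
+ return load("./spec/openapi.yaml")
+}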
+
+func loadHTTPBytes(timeout time.Duration) func(path string) ([]byte, error) {
+ return func(path string) ([]byte, error) {
+ client := &http.Client{Timeout: timeout}
+ req, err := http.NewRequest(http.MethodGet, path, nil) //nolint:noctx
+ if err != nil {
+ return nil, err
+ }
+
+ if LoadHTTPBasicAuthUsername != "" && LoadHTTPBasicAuthPassword != "" {
+ req.SetBasicAuth(LoadHTTPBasicAuthUsername, LoadHTTPBasicAuthPassword)
+ }
+
+ for key, val := range LoadHTTPCustomHeaders {
+ req.Header.Set(key, val)
+ }
+
+ resp, err := client.Do(req)
+ defer func() {
+ if resp != nil {
+ if e := resp.Body.Close(); e != nil {
+ log.Println(e)
+ }
+ }
+ }()
+ if err != nil {
+ return nil, err
+ }
+
+ if resp.StatusCode != http.StatusOK {
+ return nil, fmt.Errorf("could not access document at %q [%s] ", path, resp.Status)
+ }
+
+ return io.ReadAll(resp.Body)
+ }
+}
diff --git a/vendor/github.com/go-openapi/swag/name_lexem.go b/vendor/github.com/go-openapi/swag/name_lexem.go
new file mode 100644
index 00000000..8bb64ac3
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/name_lexem.go
@@ -0,0 +1,93 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "unicode"
+ "unicode/utf8"
+)
+
+type (
+ lexemKind uint8
+
+ nameLexem struct {
+ original string
+ matchedInitialism string
+ kind lexemKind
+ }
+)
+
+const (
+ lexemKindCasualName lexemKind = iota
+ lexemKindInitialismName
+)
+
+func newInitialismNameLexem(original, matchedInitialism string) nameLexem {
+ return nameLexem{
+ kind: lexemKindInitialismName,
+ original: original,
+ matchedInitialism: matchedInitialism,
+ }
+}
+
+func newCasualNameLexem(original string) nameLexem {
+ return nameLexem{
+ kind: lexemKindCasualName,
+ original: original,
+ }
+}
+
+func (l nameLexem) GetUnsafeGoName() string {
+ if l.kind == lexemKindInitialismName {
+ return l.matchedInitialism
+ }
+
+ var (
+ first rune
+ rest string
+ )
+
+ for i, orig := range l.original {
+ if i == 0 {
+ first = orig
+ continue
+ }
+
+ if i > 0 {
+ rest = l.original[i:]
+ break
+ }
+ }
+
+ if len(l.original) > 1 {
+ b := poolOfBuffers.BorrowBuffer(utf8.UTFMax + len(rest))
+ defer func() {
+ poolOfBuffers.RedeemBuffer(b)
+ }()
+ b.WriteRune(unicode.ToUpper(first))
+ b.WriteString(lower(rest))
+ return b.String()
+ }
+
+ return l.original
+}
+
+func (l nameLexem) GetOriginal() string {
+ return l.original
+}
+
+func (l nameLexem) IsInitialism() bool {
+ return l.kind == lexemKindInitialismName
+}
diff --git a/vendor/github.com/go-openapi/swag/net.go b/vendor/github.com/go-openapi/swag/net.go
new file mode 100644
index 00000000..821235f8
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/net.go
@@ -0,0 +1,38 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "net"
+ "strconv"
+)
+
+// SplitHostPort splits a network address into a host and a port.
+// The port is -1 when there is no port to be found
+func SplitHostPort(addr string) (host string, port int, err error) {
+ h, p, err := net.SplitHostPort(addr)
+ if err != nil {
+ return "", -1, err
+ }
+ if p == "" {
+ return "", -1, &net.AddrError{Err: "missing port in address", Addr: addr}
+ }
+
+ pi, err := strconv.Atoi(p)
+ if err != nil {
+ return "", -1, err
+ }
+ return h, pi, nil
+}
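+
+// exampleSplitHostPort is an illustrative sketch (not part of the upstream
+// file) of the helper above; the address is a made-up example.
+func exampleSplitHostPort() {
+ host, port, err := SplitHostPort("localhost:8080")
+ _, _, _ = host, port, err // "localhost", 8080, nil
+}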
diff --git a/vendor/github.com/go-openapi/swag/path.go b/vendor/github.com/go-openapi/swag/path.go
new file mode 100644
index 00000000..941bd017
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/path.go
@@ -0,0 +1,59 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+)
+
+const (
+ // GOPATHKey represents the env key for gopath
+ GOPATHKey = "GOPATH"
+)
+
+// FindInSearchPath finds a package in a provided list of paths
+func FindInSearchPath(searchPath, pkg string) string {
+ pathsList := filepath.SplitList(searchPath)
+ for _, path := range pathsList {
+ if evaluatedPath, err := filepath.EvalSymlinks(filepath.Join(path, "src", pkg)); err == nil {
+ if _, err := os.Stat(evaluatedPath); err == nil {
+ return evaluatedPath
+ }
+ }
+ }
+ return ""
+}
+
+// FindInGoSearchPath finds a package in the $GOPATH:$GOROOT
+func FindInGoSearchPath(pkg string) string {
+ return FindInSearchPath(FullGoSearchPath(), pkg)
+}
+
+// FullGoSearchPath gets the search paths for finding packages
+func FullGoSearchPath() string {
+ allPaths := os.Getenv(GOPATHKey)
+ if allPaths == "" {
+ allPaths = filepath.Join(os.Getenv("HOME"), "go")
+ }
+ if allPaths != "" {
+ allPaths = strings.Join([]string{allPaths, runtime.GOROOT()}, ":")
+ } else {
+ allPaths = runtime.GOROOT()
+ }
+ return allPaths
+}
diff --git a/vendor/github.com/go-openapi/swag/split.go b/vendor/github.com/go-openapi/swag/split.go
new file mode 100644
index 00000000..274727a8
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/split.go
@@ -0,0 +1,508 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "bytes"
+ "sync"
+ "unicode"
+ "unicode/utf8"
+)
+
+type (
+ splitter struct {
+ initialisms []string
+ initialismsRunes [][]rune
+ initialismsUpperCased [][]rune // initialisms cached in their trimmed, upper-cased version
+ postSplitInitialismCheck bool
+ }
+
+ splitterOption func(*splitter)
+
+ initialismMatch struct {
+ body []rune
+ start, end int
+ complete bool
+ }
+ initialismMatches []initialismMatch
+)
+
+type (
+ // memory pools of temporary objects.
+ //
+ // These are used to recycle temporarily allocated objects
+ // and relieve the GC from undue pressure.
+
+ matchesPool struct {
+ *sync.Pool
+ }
+
+ buffersPool struct {
+ *sync.Pool
+ }
+
+ lexemsPool struct {
+ *sync.Pool
+ }
+
+ splittersPool struct {
+ *sync.Pool
+ }
+)
+
+var (
+ // poolOfMatches holds temporary slices for recycling during the initialism match process
+ poolOfMatches = matchesPool{
+ Pool: &sync.Pool{
+ New: func() any {
+ s := make(initialismMatches, 0, maxAllocMatches)
+
+ return &s
+ },
+ },
+ }
+
+ poolOfBuffers = buffersPool{
+ Pool: &sync.Pool{
+ New: func() any {
+ return new(bytes.Buffer)
+ },
+ },
+ }
+
+ poolOfLexems = lexemsPool{
+ Pool: &sync.Pool{
+ New: func() any {
+ s := make([]nameLexem, 0, maxAllocMatches)
+
+ return &s
+ },
+ },
+ }
+
+ poolOfSplitters = splittersPool{
+ Pool: &sync.Pool{
+ New: func() any {
+ s := newSplitter()
+
+ return &s
+ },
+ },
+ }
+)
+
+// nameReplaceTable finds a word representation for special characters.
+func nameReplaceTable(r rune) (string, bool) {
+ switch r {
+ case '@':
+ return "At ", true
+ case '&':
+ return "And ", true
+ case '|':
+ return "Pipe ", true
+ case '$':
+ return "Dollar ", true
+ case '!':
+ return "Bang ", true
+ case '-':
+ return "", true
+ case '_':
+ return "", true
+ default:
+ return "", false
+ }
+}
+
+// split calls the splitter.
+//
+// Use newSplitter for more control and options
+func split(str string) []string {
+ s := poolOfSplitters.BorrowSplitter()
+ lexems := s.split(str)
+ result := make([]string, 0, len(*lexems))
+
+ for _, lexem := range *lexems {
+ result = append(result, lexem.GetOriginal())
+ }
+ poolOfLexems.RedeemLexems(lexems)
+ poolOfSplitters.RedeemSplitter(s)
+
+ return result
+
+}
+
+func newSplitter(options ...splitterOption) splitter {
+ s := splitter{
+ postSplitInitialismCheck: false,
+ initialisms: initialisms,
+ initialismsRunes: initialismsRunes,
+ initialismsUpperCased: initialismsUpperCased,
+ }
+
+ for _, option := range options {
+ option(&s)
+ }
+
+ return s
+}
+
+// withPostSplitInitialismCheck allows catching initialisms after the main split process
+func withPostSplitInitialismCheck(s *splitter) {
+ s.postSplitInitialismCheck = true
+}
+
+func (p matchesPool) BorrowMatches() *initialismMatches {
+ s := p.Get().(*initialismMatches)
+ *s = (*s)[:0] // reset slice, keep allocated capacity
+
+ return s
+}
+
+func (p buffersPool) BorrowBuffer(size int) *bytes.Buffer {
+ s := p.Get().(*bytes.Buffer)
+ s.Reset()
+
+ if s.Cap() < size {
+ s.Grow(size)
+ }
+
+ return s
+}
+
+func (p lexemsPool) BorrowLexems() *[]nameLexem {
+ s := p.Get().(*[]nameLexem)
+ *s = (*s)[:0] // reset slice, keep allocated capacity
+
+ return s
+}
+
+func (p splittersPool) BorrowSplitter(options ...splitterOption) *splitter {
+ s := p.Get().(*splitter)
+ s.postSplitInitialismCheck = false // reset options
+ for _, apply := range options {
+ apply(s)
+ }
+
+ return s
+}
+
+func (p matchesPool) RedeemMatches(s *initialismMatches) {
+ p.Put(s)
+}
+
+func (p buffersPool) RedeemBuffer(s *bytes.Buffer) {
+ p.Put(s)
+}
+
+func (p lexemsPool) RedeemLexems(s *[]nameLexem) {
+ p.Put(s)
+}
+
+func (p splittersPool) RedeemSplitter(s *splitter) {
+ p.Put(s)
+}
+
+func (m initialismMatch) isZero() bool {
+ return m.start == 0 && m.end == 0
+}
+
+func (s splitter) split(name string) *[]nameLexem {
+ nameRunes := []rune(name)
+ matches := s.gatherInitialismMatches(nameRunes)
+ if matches == nil {
+ return poolOfLexems.BorrowLexems()
+ }
+
+ return s.mapMatchesToNameLexems(nameRunes, matches)
+}
+
+func (s splitter) gatherInitialismMatches(nameRunes []rune) *initialismMatches {
+ var matches *initialismMatches
+
+ for currentRunePosition, currentRune := range nameRunes {
+ // recycle these allocations as we loop over runes
+ // with such recycling, only 2 slices should be allocated per call
+ // instead of O(n).
+ newMatches := poolOfMatches.BorrowMatches()
+
+ // check current initialism matches
+ if matches != nil { // skip first iteration
+ for _, match := range *matches {
+ if keepCompleteMatch := match.complete; keepCompleteMatch {
+ *newMatches = append(*newMatches, match)
+ continue
+ }
+
+ // drop failed match
+ currentMatchRune := match.body[currentRunePosition-match.start]
+ if currentMatchRune != currentRune {
+ continue
+ }
+
+ // try to complete ongoing match
+ if currentRunePosition-match.start == len(match.body)-1 {
+ // we are close; the next step is to check the symbol ahead
+ // if it is a small letter, then it is not the end of match
+ // but beginning of the next word
+
+ if currentRunePosition < len(nameRunes)-1 {
+ nextRune := nameRunes[currentRunePosition+1]
+ if newWord := unicode.IsLower(nextRune); newWord {
+ // oh ok, it was the start of a new word
+ continue
+ }
+ }
+
+ match.complete = true
+ match.end = currentRunePosition
+ }
+
+ *newMatches = append(*newMatches, match)
+ }
+ }
+
+ // check for new initialism matches
+ for i := range s.initialisms {
+ initialismRunes := s.initialismsRunes[i]
+ if initialismRunes[0] == currentRune {
+ *newMatches = append(*newMatches, initialismMatch{
+ start: currentRunePosition,
+ body: initialismRunes,
+ complete: false,
+ })
+ }
+ }
+
+ if matches != nil {
+ poolOfMatches.RedeemMatches(matches)
+ }
+ matches = newMatches
+ }
+
+ // up to the caller to redeem this last slice
+ return matches
+}
+
+func (s splitter) mapMatchesToNameLexems(nameRunes []rune, matches *initialismMatches) *[]nameLexem {
+ nameLexems := poolOfLexems.BorrowLexems()
+
+ var lastAcceptedMatch initialismMatch
+ for _, match := range *matches {
+ if !match.complete {
+ continue
+ }
+
+ if firstMatch := lastAcceptedMatch.isZero(); firstMatch {
+ s.appendBrokenDownCasualString(nameLexems, nameRunes[:match.start])
+ *nameLexems = append(*nameLexems, s.breakInitialism(string(match.body)))
+
+ lastAcceptedMatch = match
+
+ continue
+ }
+
+ if overlappedMatch := match.start <= lastAcceptedMatch.end; overlappedMatch {
+ continue
+ }
+
+ middle := nameRunes[lastAcceptedMatch.end+1 : match.start]
+ s.appendBrokenDownCasualString(nameLexems, middle)
+ *nameLexems = append(*nameLexems, s.breakInitialism(string(match.body)))
+
+ lastAcceptedMatch = match
+ }
+
+ // we have not found any accepted matches
+ if lastAcceptedMatch.isZero() {
+ *nameLexems = (*nameLexems)[:0]
+ s.appendBrokenDownCasualString(nameLexems, nameRunes)
+ } else if lastAcceptedMatch.end+1 != len(nameRunes) {
+ rest := nameRunes[lastAcceptedMatch.end+1:]
+ s.appendBrokenDownCasualString(nameLexems, rest)
+ }
+
+ poolOfMatches.RedeemMatches(matches)
+
+ return nameLexems
+}
+
+func (s splitter) breakInitialism(original string) nameLexem {
+ return newInitialismNameLexem(original, original)
+}
+
+func (s splitter) appendBrokenDownCasualString(segments *[]nameLexem, str []rune) {
+ currentSegment := poolOfBuffers.BorrowBuffer(len(str)) // unlike strings.Builder, bytes.Buffer initial storage can be reused
+ defer func() {
+ poolOfBuffers.RedeemBuffer(currentSegment)
+ }()
+
+ addCasualNameLexem := func(original string) {
+ *segments = append(*segments, newCasualNameLexem(original))
+ }
+
+ addInitialismNameLexem := func(original, match string) {
+ *segments = append(*segments, newInitialismNameLexem(original, match))
+ }
+
+ var addNameLexem func(string)
+ if s.postSplitInitialismCheck {
+ addNameLexem = func(original string) {
+ for i := range s.initialisms {
+ if isEqualFoldIgnoreSpace(s.initialismsUpperCased[i], original) {
+ addInitialismNameLexem(original, s.initialisms[i])
+
+ return
+ }
+ }
+
+ addCasualNameLexem(original)
+ }
+ } else {
+ addNameLexem = addCasualNameLexem
+ }
+
+ for _, rn := range str {
+ if replace, found := nameReplaceTable(rn); found {
+ if currentSegment.Len() > 0 {
+ addNameLexem(currentSegment.String())
+ currentSegment.Reset()
+ }
+
+ if replace != "" {
+ addNameLexem(replace)
+ }
+
+ continue
+ }
+
+ if !unicode.In(rn, unicode.L, unicode.M, unicode.N, unicode.Pc) {
+ if currentSegment.Len() > 0 {
+ addNameLexem(currentSegment.String())
+ currentSegment.Reset()
+ }
+
+ continue
+ }
+
+ if unicode.IsUpper(rn) {
+ if currentSegment.Len() > 0 {
+ addNameLexem(currentSegment.String())
+ }
+ currentSegment.Reset()
+ }
+
+ currentSegment.WriteRune(rn)
+ }
+
+ if currentSegment.Len() > 0 {
+ addNameLexem(currentSegment.String())
+ }
+}
+
+// isEqualFoldIgnoreSpace is the same as strings.EqualFold, but
+// it ignores leading and trailing blank spaces in the compared
+// string.
+//
+// base is assumed to be composed of upper-cased runes, and to be already
+// trimmed.
+//
+// This code is heavily inspired by strings.EqualFold.
+func isEqualFoldIgnoreSpace(base []rune, str string) bool {
+ var i, baseIndex int
+ // equivalent to b := []byte(str), but without data copy
+ b := hackStringBytes(str)
+
+ for i < len(b) {
+ if c := b[i]; c < utf8.RuneSelf {
+ // fast path for ASCII
+ if c != ' ' && c != '\t' {
+ break
+ }
+ i++
+
+ continue
+ }
+
+ // unicode case
+ r, size := utf8.DecodeRune(b[i:])
+ if !unicode.IsSpace(r) {
+ break
+ }
+ i += size
+ }
+
+ if i >= len(b) {
+ return len(base) == 0
+ }
+
+ for _, baseRune := range base {
+ if i >= len(b) {
+ break
+ }
+
+ if c := b[i]; c < utf8.RuneSelf {
+ // single byte rune case (ASCII)
+ if baseRune >= utf8.RuneSelf {
+ return false
+ }
+
+ baseChar := byte(baseRune)
+ if c != baseChar &&
+ !('a' <= c && c <= 'z' && c-'a'+'A' == baseChar) {
+ return false
+ }
+
+ baseIndex++
+ i++
+
+ continue
+ }
+
+ // unicode case
+ r, size := utf8.DecodeRune(b[i:])
+ if unicode.ToUpper(r) != baseRune {
+ return false
+ }
+ baseIndex++
+ i += size
+ }
+
+ if baseIndex != len(base) {
+ return false
+ }
+
+ // all passed: now we should only have blanks
+ for i < len(b) {
+ if c := b[i]; c < utf8.RuneSelf {
+ // fast path for ASCII
+ if c != ' ' && c != '\t' {
+ return false
+ }
+ i++
+
+ continue
+ }
+
+ // unicode case
+ r, size := utf8.DecodeRune(b[i:])
+ if !unicode.IsSpace(r) {
+ return false
+ }
+
+ i += size
+ }
+
+ return true
+}
diff --git a/vendor/github.com/go-openapi/swag/string_bytes.go b/vendor/github.com/go-openapi/swag/string_bytes.go
new file mode 100644
index 00000000..c52d6bf7
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/string_bytes.go
@@ -0,0 +1,22 @@
+package swag
+
+import "unsafe"
+
+type internalString struct {
+ Data unsafe.Pointer
+ Len int
+}
+
+// hackStringBytes returns the (unsafe) underlying bytes slice of a string.
+func hackStringBytes(str string) []byte {
+ p := (*internalString)(unsafe.Pointer(&str)).Data
+ return unsafe.Slice((*byte)(p), len(str))
+}
+
+/*
+ * go1.20 version (for when go mod moves to a go1.20 requirement):
+
+func hackStringBytes(str string) []byte {
+ return unsafe.Slice(unsafe.StringData(str), len(str))
+}
+*/
diff --git a/vendor/github.com/go-openapi/swag/util.go b/vendor/github.com/go-openapi/swag/util.go
new file mode 100644
index 00000000..5051401c
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/util.go
@@ -0,0 +1,364 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "reflect"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// GoNamePrefixFunc sets an optional rule to prefix go names
+// which do not start with a letter.
+//
+// The prefix function is assumed to return a string that starts with an upper case letter.
+//
+// e.g. to help convert "123" into "{prefix}123"
+//
+// The default is to prefix with "X"
+var GoNamePrefixFunc func(string) string
+
+func prefixFunc(name, in string) string {
+ if GoNamePrefixFunc == nil {
+ return "X" + in
+ }
+
+ return GoNamePrefixFunc(name) + in
+}
+
+const (
+ // collectionFormatComma = "csv"
+ collectionFormatSpace = "ssv"
+ collectionFormatTab = "tsv"
+ collectionFormatPipe = "pipes"
+ collectionFormatMulti = "multi"
+)
+
+// JoinByFormat joins a string array by a known format (e.g. swagger's collectionFormat attribute):
+//
+// ssv: space separated value
+// tsv: tab separated value
+// pipes: pipe (|) separated value
+// csv: comma separated value (default)
+func JoinByFormat(data []string, format string) []string {
+ if len(data) == 0 {
+ return data
+ }
+ var sep string
+ switch format {
+ case collectionFormatSpace:
+ sep = " "
+ case collectionFormatTab:
+ sep = "\t"
+ case collectionFormatPipe:
+ sep = "|"
+ case collectionFormatMulti:
+ return data
+ default:
+ sep = ","
+ }
+ return []string{strings.Join(data, sep)}
+}
+
+// SplitByFormat splits a string by a known format:
+//
+// ssv: space separated value
+// tsv: tab separated value
+// pipes: pipe (|) separated value
+// csv: comma separated value (default)
+func SplitByFormat(data, format string) []string {
+ if data == "" {
+ return nil
+ }
+ var sep string
+ switch format {
+ case collectionFormatSpace:
+ sep = " "
+ case collectionFormatTab:
+ sep = "\t"
+ case collectionFormatPipe:
+ sep = "|"
+ case collectionFormatMulti:
+ return nil
+ default:
+ sep = ","
+ }
+ var result []string
+ for _, s := range strings.Split(data, sep) {
+ if ts := strings.TrimSpace(s); ts != "" {
+ result = append(result, ts)
+ }
+ }
+ return result
+}
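+
+// exampleFormats is an illustrative sketch (not part of the upstream file)
+// showing the space-separated ("ssv") round trip described above.
+func exampleFormats() {
+ joined := JoinByFormat([]string{"a", "b", "c"}, "ssv") // []string{"a b c"}
+ _ = SplitByFormat(joined[0], "ssv")                    // []string{"a", "b", "c"}
+}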
+
+// trim removes leading and trailing whitespace
+func trim(str string) string {
+ return strings.TrimSpace(str)
+}
+
+// Shortcut to strings.ToUpper()
+func upper(str string) string {
+ return strings.ToUpper(trim(str))
+}
+
+// Shortcut to strings.ToLower()
+func lower(str string) string {
+ return strings.ToLower(trim(str))
+}
+
+// Camelize an uppercased word
+func Camelize(word string) string {
+ camelized := poolOfBuffers.BorrowBuffer(len(word))
+ defer func() {
+ poolOfBuffers.RedeemBuffer(camelized)
+ }()
+
+ for pos, ru := range []rune(word) {
+ if pos > 0 {
+ camelized.WriteRune(unicode.ToLower(ru))
+ } else {
+ camelized.WriteRune(unicode.ToUpper(ru))
+ }
+ }
+ return camelized.String()
+}
+
+// ToFileName lowercases and underscores a go type name
+func ToFileName(name string) string {
+ in := split(name)
+ out := make([]string, 0, len(in))
+
+ for _, w := range in {
+ out = append(out, lower(w))
+ }
+
+ return strings.Join(out, "_")
+}
+
+// ToCommandName lowercases and underscores a go type name
+func ToCommandName(name string) string {
+ in := split(name)
+ out := make([]string, 0, len(in))
+
+ for _, w := range in {
+ out = append(out, lower(w))
+ }
+ return strings.Join(out, "-")
+}
+
+// ToHumanNameLower represents a code name as a human series of words
+func ToHumanNameLower(name string) string {
+ s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck)
+ in := s.split(name)
+ poolOfSplitters.RedeemSplitter(s)
+ out := make([]string, 0, len(*in))
+
+ for _, w := range *in {
+ if !w.IsInitialism() {
+ out = append(out, lower(w.GetOriginal()))
+ } else {
+ out = append(out, trim(w.GetOriginal()))
+ }
+ }
+ poolOfLexems.RedeemLexems(in)
+
+ return strings.Join(out, " ")
+}
+
+// ToHumanNameTitle represents a code name as a human series of words with the first letters titleized
+func ToHumanNameTitle(name string) string {
+ s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck)
+ in := s.split(name)
+ poolOfSplitters.RedeemSplitter(s)
+
+ out := make([]string, 0, len(*in))
+ for _, w := range *in {
+ original := trim(w.GetOriginal())
+ if !w.IsInitialism() {
+ out = append(out, Camelize(original))
+ } else {
+ out = append(out, original)
+ }
+ }
+ poolOfLexems.RedeemLexems(in)
+
+ return strings.Join(out, " ")
+}
+
+// ToJSONName camelcases a name which can be underscored or pascal cased
+func ToJSONName(name string) string {
+ in := split(name)
+ out := make([]string, 0, len(in))
+
+ for i, w := range in {
+ if i == 0 {
+ out = append(out, lower(w))
+ continue
+ }
+ out = append(out, Camelize(trim(w)))
+ }
+ return strings.Join(out, "")
+}
+
+// ToVarName camelcases a name which can be underscored or pascal cased
+func ToVarName(name string) string {
+ res := ToGoName(name)
+ if isInitialism(res) {
+ return lower(res)
+ }
+ if len(res) <= 1 {
+ return lower(res)
+ }
+ return lower(res[:1]) + res[1:]
+}
+
+// ToGoName translates a swagger name which can be underscored or camel cased to a name that golint likes
+func ToGoName(name string) string {
+ s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck)
+ lexems := s.split(name)
+ poolOfSplitters.RedeemSplitter(s)
+ defer func() {
+ poolOfLexems.RedeemLexems(lexems)
+ }()
+ lexemes := *lexems
+
+ if len(lexemes) == 0 {
+ return ""
+ }
+
+ result := poolOfBuffers.BorrowBuffer(len(name))
+ defer func() {
+ poolOfBuffers.RedeemBuffer(result)
+ }()
+
+ // check if not starting with a letter, upper case
+ firstPart := lexemes[0].GetUnsafeGoName()
+ if lexemes[0].IsInitialism() {
+ firstPart = upper(firstPart)
+ }
+
+ if c := firstPart[0]; c < utf8.RuneSelf {
+ // ASCII
+ switch {
+ case 'A' <= c && c <= 'Z':
+ result.WriteString(firstPart)
+ case 'a' <= c && c <= 'z':
+ result.WriteByte(c - 'a' + 'A')
+ result.WriteString(firstPart[1:])
+ default:
+ result.WriteString(prefixFunc(name, firstPart))
+ // NOTE: no longer check if prefixFunc returns a string that starts with uppercase:
+ // assume this is always the case
+ }
+ } else {
+ // unicode
+ firstRune, _ := utf8.DecodeRuneInString(firstPart)
+ switch {
+ case !unicode.IsLetter(firstRune):
+ result.WriteString(prefixFunc(name, firstPart))
+ case !unicode.IsUpper(firstRune):
+ result.WriteString(prefixFunc(name, firstPart))
+ /*
+ result.WriteRune(unicode.ToUpper(firstRune))
+ result.WriteString(firstPart[offset:])
+ */
+ default:
+ result.WriteString(firstPart)
+ }
+ }
+
+ for _, lexem := range lexemes[1:] {
+ goName := lexem.GetUnsafeGoName()
+
+ // to support old behavior
+ if lexem.IsInitialism() {
+ goName = upper(goName)
+ }
+ result.WriteString(goName)
+ }
+
+ return result.String()
+}
+
+// ContainsStrings searches a slice of strings for a case-sensitive match
+func ContainsStrings(coll []string, item string) bool {
+ for _, a := range coll {
+ if a == item {
+ return true
+ }
+ }
+ return false
+}
+
+// ContainsStringsCI searches a slice of strings for a case-insensitive match
+func ContainsStringsCI(coll []string, item string) bool {
+ for _, a := range coll {
+ if strings.EqualFold(a, item) {
+ return true
+ }
+ }
+ return false
+}
+
+type zeroable interface {
+ IsZero() bool
+}
+
+// IsZero returns true when the value passed into the function is a zero value.
+// This allows for safer checking of interface values.
+func IsZero(data interface{}) bool {
+ v := reflect.ValueOf(data)
+ // check for nil data
+ switch v.Kind() { //nolint:exhaustive
+ case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
+ if v.IsNil() {
+ return true
+ }
+ }
+
+ // check for things that have an IsZero method instead
+ if vv, ok := data.(zeroable); ok {
+ return vv.IsZero()
+ }
+
+ // continue with slightly more complex reflection
+ switch v.Kind() { //nolint:exhaustive
+ case reflect.String:
+ return v.Len() == 0
+ case reflect.Bool:
+ return !v.Bool()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return v.Uint() == 0
+ case reflect.Float32, reflect.Float64:
+ return v.Float() == 0
+ case reflect.Struct, reflect.Array:
+ return reflect.DeepEqual(data, reflect.Zero(v.Type()).Interface())
+ case reflect.Invalid:
+ return true
+ default:
+ return false
+ }
+}
+
+// CommandLineOptionsGroup represents a group of user-defined command line options
+type CommandLineOptionsGroup struct {
+ ShortDescription string
+ LongDescription string
+ Options interface{}
+}
diff --git a/vendor/github.com/go-openapi/swag/yaml.go b/vendor/github.com/go-openapi/swag/yaml.go
new file mode 100644
index 00000000..a8c4e359
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/yaml.go
@@ -0,0 +1,480 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "encoding/json"
+ "fmt"
+ "path/filepath"
+ "reflect"
+ "sort"
+ "strconv"
+
+ "github.com/mailru/easyjson/jlexer"
+ "github.com/mailru/easyjson/jwriter"
+ yaml "gopkg.in/yaml.v3"
+)
+
+// YAMLMatcher matches yaml
+func YAMLMatcher(path string) bool {
+ ext := filepath.Ext(path)
+ return ext == ".yaml" || ext == ".yml"
+}
+
+// YAMLToJSON converts YAML unmarshaled data into json compatible data
+func YAMLToJSON(data interface{}) (json.RawMessage, error) {
+ jm, err := transformData(data)
+ if err != nil {
+ return nil, err
+ }
+ b, err := WriteJSON(jm)
+ return json.RawMessage(b), err
+}
+
+// BytesToYAMLDoc converts a byte slice into a YAML document
+func BytesToYAMLDoc(data []byte) (interface{}, error) {
+ var document yaml.Node // preserve order that is present in the document
+ if err := yaml.Unmarshal(data, &document); err != nil {
+ return nil, err
+ }
+ if document.Kind != yaml.DocumentNode || len(document.Content) != 1 || document.Content[0].Kind != yaml.MappingNode {
+ return nil, fmt.Errorf("only YAML documents that are objects are supported")
+ }
+ return &document, nil
+}
+
+func yamlNode(root *yaml.Node) (interface{}, error) {
+ switch root.Kind {
+ case yaml.DocumentNode:
+ return yamlDocument(root)
+ case yaml.SequenceNode:
+ return yamlSequence(root)
+ case yaml.MappingNode:
+ return yamlMapping(root)
+ case yaml.ScalarNode:
+ return yamlScalar(root)
+ case yaml.AliasNode:
+ return yamlNode(root.Alias)
+ default:
+ return nil, fmt.Errorf("unsupported YAML node type: %v", root.Kind)
+ }
+}
+
+func yamlDocument(node *yaml.Node) (interface{}, error) {
+ if len(node.Content) != 1 {
+ return nil, fmt.Errorf("unexpected YAML Document node content length: %d", len(node.Content))
+ }
+ return yamlNode(node.Content[0])
+}
+
+func yamlMapping(node *yaml.Node) (interface{}, error) {
+ m := make(JSONMapSlice, len(node.Content)/2)
+
+ var j int
+ for i := 0; i < len(node.Content); i += 2 {
+ var nmi JSONMapItem
+ k, err := yamlStringScalarC(node.Content[i])
+ if err != nil {
+ return nil, fmt.Errorf("unable to decode YAML map key: %w", err)
+ }
+ nmi.Key = k
+ v, err := yamlNode(node.Content[i+1])
+ if err != nil {
+ return nil, fmt.Errorf("unable to process YAML map value for key %q: %w", k, err)
+ }
+ nmi.Value = v
+ m[j] = nmi
+ j++
+ }
+ return m, nil
+}
+
+func yamlSequence(node *yaml.Node) (interface{}, error) {
+ s := make([]interface{}, 0)
+
+ for i := 0; i < len(node.Content); i++ {
+
+ v, err := yamlNode(node.Content[i])
+ if err != nil {
+ return nil, fmt.Errorf("unable to decode YAML sequence value: %w", err)
+ }
+ s = append(s, v)
+ }
+ return s, nil
+}
+
+const ( // See https://yaml.org/type/
+ yamlStringScalar = "tag:yaml.org,2002:str"
+ yamlIntScalar = "tag:yaml.org,2002:int"
+ yamlBoolScalar = "tag:yaml.org,2002:bool"
+ yamlFloatScalar = "tag:yaml.org,2002:float"
+ yamlTimestamp = "tag:yaml.org,2002:timestamp"
+ yamlNull = "tag:yaml.org,2002:null"
+)
+
+func yamlScalar(node *yaml.Node) (interface{}, error) {
+ switch node.LongTag() {
+ case yamlStringScalar:
+ return node.Value, nil
+ case yamlBoolScalar:
+ b, err := strconv.ParseBool(node.Value)
+ if err != nil {
+ return nil, fmt.Errorf("unable to process scalar node. Got %q. Expecting bool content: %w", node.Value, err)
+ }
+ return b, nil
+ case yamlIntScalar:
+ i, err := strconv.ParseInt(node.Value, 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf("unable to process scalar node. Got %q. Expecting integer content: %w", node.Value, err)
+ }
+ return i, nil
+ case yamlFloatScalar:
+ f, err := strconv.ParseFloat(node.Value, 64)
+ if err != nil {
+ return nil, fmt.Errorf("unable to process scalar node. Got %q. Expecting float content: %w", node.Value, err)
+ }
+ return f, nil
+ case yamlTimestamp:
+ return node.Value, nil
+ case yamlNull:
+ return nil, nil //nolint:nilnil
+ default:
+ return nil, fmt.Errorf("YAML tag %q is not supported", node.LongTag())
+ }
+}
+
+func yamlStringScalarC(node *yaml.Node) (string, error) {
+ if node.Kind != yaml.ScalarNode {
+ return "", fmt.Errorf("expecting a string scalar but got %q", node.Kind)
+ }
+ switch node.LongTag() {
+ case yamlStringScalar, yamlIntScalar, yamlFloatScalar:
+ return node.Value, nil
+ default:
+ return "", fmt.Errorf("YAML tag %q is not supported as map key", node.LongTag())
+ }
+}
+
+// JSONMapSlice represent a JSON object, with the order of keys maintained
+type JSONMapSlice []JSONMapItem
+
+// MarshalJSON renders a JSONMapSlice as JSON
+func (s JSONMapSlice) MarshalJSON() ([]byte, error) {
+ w := &jwriter.Writer{Flags: jwriter.NilMapAsEmpty | jwriter.NilSliceAsEmpty}
+ s.MarshalEasyJSON(w)
+ return w.BuildBytes()
+}
+
+// MarshalEasyJSON renders a JSONMapSlice as JSON, using easyJSON
+func (s JSONMapSlice) MarshalEasyJSON(w *jwriter.Writer) {
+ w.RawByte('{')
+
+ ln := len(s)
+ last := ln - 1
+ for i := 0; i < ln; i++ {
+ s[i].MarshalEasyJSON(w)
+ if i != last { // write a separator after every item except the last
+ w.RawByte(',')
+ }
+ }
+
+ w.RawByte('}')
+}
+
+// UnmarshalJSON makes a JSONMapSlice from JSON
+func (s *JSONMapSlice) UnmarshalJSON(data []byte) error {
+ l := jlexer.Lexer{Data: data}
+ s.UnmarshalEasyJSON(&l)
+ return l.Error()
+}
+
+// UnmarshalEasyJSON makes a JSONMapSlice from JSON, using easyJSON
+func (s *JSONMapSlice) UnmarshalEasyJSON(in *jlexer.Lexer) {
+ if in.IsNull() {
+ in.Skip()
+ return
+ }
+
+ var result JSONMapSlice
+ in.Delim('{')
+ for !in.IsDelim('}') {
+ var mi JSONMapItem
+ mi.UnmarshalEasyJSON(in)
+ result = append(result, mi)
+ }
+ *s = result
+}
+
+func (s JSONMapSlice) MarshalYAML() (interface{}, error) {
+ var n yaml.Node
+ n.Kind = yaml.DocumentNode
+ var nodes []*yaml.Node
+ for _, item := range s {
+ nn, err := json2yaml(item.Value)
+ if err != nil {
+ return nil, err
+ }
+ ns := []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Tag: yamlStringScalar,
+ Value: item.Key,
+ },
+ nn,
+ }
+ nodes = append(nodes, ns...)
+ }
+
+ n.Content = []*yaml.Node{
+ {
+ Kind: yaml.MappingNode,
+ Content: nodes,
+ },
+ }
+
+ return yaml.Marshal(&n)
+}
+
+func isNil(input interface{}) bool {
+ if input == nil {
+ return true
+ }
+ kind := reflect.TypeOf(input).Kind()
+ switch kind { //nolint:exhaustive
+ case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan:
+ return reflect.ValueOf(input).IsNil()
+ default:
+ return false
+ }
+}
+
+func json2yaml(item interface{}) (*yaml.Node, error) {
+ if isNil(item) {
+ return &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Value: "null",
+ }, nil
+ }
+
+ switch val := item.(type) {
+ case JSONMapSlice:
+ var n yaml.Node
+ n.Kind = yaml.MappingNode
+ for i := range val {
+ childNode, err := json2yaml(&val[i].Value)
+ if err != nil {
+ return nil, err
+ }
+ n.Content = append(n.Content, &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlStringScalar,
+ Value: val[i].Key,
+ }, childNode)
+ }
+ return &n, nil
+ case map[string]interface{}:
+ var n yaml.Node
+ n.Kind = yaml.MappingNode
+ keys := make([]string, 0, len(val))
+ for k := range val {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+
+ for _, k := range keys {
+ v := val[k]
+ childNode, err := json2yaml(v)
+ if err != nil {
+ return nil, err
+ }
+ n.Content = append(n.Content, &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlStringScalar,
+ Value: k,
+ }, childNode)
+ }
+ return &n, nil
+ case []interface{}:
+ var n yaml.Node
+ n.Kind = yaml.SequenceNode
+ for i := range val {
+ childNode, err := json2yaml(val[i])
+ if err != nil {
+ return nil, err
+ }
+ n.Content = append(n.Content, childNode)
+ }
+ return &n, nil
+ case string:
+ return &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlStringScalar,
+ Value: val,
+ }, nil
+ case float64:
+ return &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlFloatScalar,
+ Value: strconv.FormatFloat(val, 'f', -1, 64),
+ }, nil
+ case int64:
+ return &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlIntScalar,
+ Value: strconv.FormatInt(val, 10),
+ }, nil
+ case uint64:
+ return &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlIntScalar,
+ Value: strconv.FormatUint(val, 10),
+ }, nil
+ case bool:
+ return &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlBoolScalar,
+ Value: strconv.FormatBool(val),
+ }, nil
+ default:
+ return nil, fmt.Errorf("unhandled type: %T", val)
+ }
+}
+
+// JSONMapItem represents the value of a key in a JSON object held by JSONMapSlice
+type JSONMapItem struct {
+ Key string
+ Value interface{}
+}
+
+// MarshalJSON renders a JSONMapItem as JSON
+func (s JSONMapItem) MarshalJSON() ([]byte, error) {
+ w := &jwriter.Writer{Flags: jwriter.NilMapAsEmpty | jwriter.NilSliceAsEmpty}
+ s.MarshalEasyJSON(w)
+ return w.BuildBytes()
+}
+
+// MarshalEasyJSON renders a JSONMapItem as JSON, using easyJSON
+func (s JSONMapItem) MarshalEasyJSON(w *jwriter.Writer) {
+ w.String(s.Key)
+ w.RawByte(':')
+ w.Raw(WriteJSON(s.Value))
+}
+
+// UnmarshalJSON makes a JSONMapItem from JSON
+func (s *JSONMapItem) UnmarshalJSON(data []byte) error {
+ l := jlexer.Lexer{Data: data}
+ s.UnmarshalEasyJSON(&l)
+ return l.Error()
+}
+
+// UnmarshalEasyJSON makes a JSONMapItem from JSON, using easyJSON
+func (s *JSONMapItem) UnmarshalEasyJSON(in *jlexer.Lexer) {
+ key := in.UnsafeString()
+ in.WantColon()
+ value := in.Interface()
+ in.WantComma()
+ s.Key = key
+ s.Value = value
+}
+
+func transformData(input interface{}) (out interface{}, err error) {
+ format := func(t interface{}) (string, error) {
+ switch k := t.(type) {
+ case string:
+ return k, nil
+ case uint:
+ return strconv.FormatUint(uint64(k), 10), nil
+ case uint8:
+ return strconv.FormatUint(uint64(k), 10), nil
+ case uint16:
+ return strconv.FormatUint(uint64(k), 10), nil
+ case uint32:
+ return strconv.FormatUint(uint64(k), 10), nil
+ case uint64:
+ return strconv.FormatUint(k, 10), nil
+ case int:
+ return strconv.Itoa(k), nil
+ case int8:
+ return strconv.FormatInt(int64(k), 10), nil
+ case int16:
+ return strconv.FormatInt(int64(k), 10), nil
+ case int32:
+ return strconv.FormatInt(int64(k), 10), nil
+ case int64:
+ return strconv.FormatInt(k, 10), nil
+ default:
+ return "", fmt.Errorf("unexpected map key type, got: %T", k)
+ }
+ }
+
+ switch in := input.(type) {
+ case yaml.Node:
+ return yamlNode(&in)
+ case *yaml.Node:
+ return yamlNode(in)
+ case map[interface{}]interface{}:
+ o := make(JSONMapSlice, 0, len(in))
+ for ke, va := range in {
+ var nmi JSONMapItem
+ if nmi.Key, err = format(ke); err != nil {
+ return nil, err
+ }
+
+ v, ert := transformData(va)
+ if ert != nil {
+ return nil, ert
+ }
+ nmi.Value = v
+ o = append(o, nmi)
+ }
+ return o, nil
+ case []interface{}:
+ len1 := len(in)
+ o := make([]interface{}, len1)
+ for i := 0; i < len1; i++ {
+ o[i], err = transformData(in[i])
+ if err != nil {
+ return nil, err
+ }
+ }
+ return o, nil
+ }
+ return input, nil
+}
+
+// YAMLDoc loads a yaml document from either http or a file and converts it to json
+func YAMLDoc(path string) (json.RawMessage, error) {
+ yamlDoc, err := YAMLData(path)
+ if err != nil {
+ return nil, err
+ }
+
+ data, err := YAMLToJSON(yamlDoc)
+ if err != nil {
+ return nil, err
+ }
+
+ return data, nil
+}
+
+// YAMLData loads a yaml document from either http or a file
+func YAMLData(path string) (interface{}, error) {
+ data, err := LoadFromFileOrHTTP(path)
+ if err != nil {
+ return nil, err
+ }
+
+ return BytesToYAMLDoc(data)
+}
diff --git a/vendor/github.com/invopop/yaml/.gitignore b/vendor/github.com/invopop/yaml/.gitignore
new file mode 100644
index 00000000..e256a31e
--- /dev/null
+++ b/vendor/github.com/invopop/yaml/.gitignore
@@ -0,0 +1,20 @@
+# OSX leaves these everywhere on SMB shares
+._*
+
+# Eclipse files
+.classpath
+.project
+.settings/**
+
+# Emacs save files
+*~
+
+# Vim-related files
+[._]*.s[a-w][a-z]
+[._]s[a-w][a-z]
+*.un~
+Session.vim
+.netrwhist
+
+# Go test binaries
+*.test
diff --git a/vendor/github.com/invopop/yaml/.golangci.toml b/vendor/github.com/invopop/yaml/.golangci.toml
new file mode 100644
index 00000000..4a438ca2
--- /dev/null
+++ b/vendor/github.com/invopop/yaml/.golangci.toml
@@ -0,0 +1,15 @@
+[run]
+timeout = "120s"
+
+[output]
+format = "colored-line-number"
+
+[linters]
+enable = [
+ "gocyclo", "unconvert", "goimports", "unused", "varcheck",
+ "vetshadow", "misspell", "nakedret", "errcheck", "revive", "ineffassign",
+ "deadcode", "goconst", "vet", "unparam", "gofmt"
+]
+
+[issues]
+exclude-use-default = false
diff --git a/vendor/github.com/invopop/yaml/LICENSE b/vendor/github.com/invopop/yaml/LICENSE
new file mode 100644
index 00000000..7805d36d
--- /dev/null
+++ b/vendor/github.com/invopop/yaml/LICENSE
@@ -0,0 +1,50 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Sam Ghods
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
+Copyright (c) 2012 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/invopop/yaml/README.md b/vendor/github.com/invopop/yaml/README.md
new file mode 100644
index 00000000..2c33dfe5
--- /dev/null
+++ b/vendor/github.com/invopop/yaml/README.md
@@ -0,0 +1,128 @@
+# YAML marshaling and unmarshaling support for Go
+
+[![Lint](https://github.com/invopop/yaml/actions/workflows/lint.yaml/badge.svg)](https://github.com/invopop/yaml/actions/workflows/lint.yaml)
+[![Test Go](https://github.com/invopop/yaml/actions/workflows/test.yaml/badge.svg)](https://github.com/invopop/yaml/actions/workflows/test.yaml)
+[![Go Report Card](https://goreportcard.com/badge/github.com/invopop/yaml)](https://goreportcard.com/report/github.com/invopop/yaml)
+![Latest Tag](https://img.shields.io/github/v/tag/invopop/yaml)
+
+## Introduction
+
+A wrapper around [go-yaml](https://github.com/go-yaml/yaml) designed to enable a better way of handling YAML when marshaling to and from structs.
+
+This is a fork and split of the original [ghodss/yaml](https://github.com/ghodss/yaml) repository which no longer appears to be maintained.
+
+In short, this library first converts YAML to JSON using go-yaml and then uses `json.Marshal` and `json.Unmarshal` to convert to or from the struct. This means that it effectively reuses the JSON struct tags as well as the custom JSON methods `MarshalJSON` and `UnmarshalJSON`, unlike go-yaml. For a detailed overview of the rationale behind this method, [see this blog post](https://web.archive.org/web/20150812020634/http://ghodss.com/2014/the-right-way-to-handle-yaml-in-golang/).
+
+## Compatibility
+
+This package uses [go-yaml](https://github.com/go-yaml/yaml) and therefore supports [everything go-yaml supports](https://github.com/go-yaml/yaml#compatibility).
+
+Tested against Go versions 1.14 and onwards.
+
+## Caveats
+
+**Caveat #1:** When using `yaml.Marshal` and `yaml.Unmarshal`, binary data should NOT be preceded with the `!!binary` YAML tag. If you do, go-yaml will convert the binary data from base64 to native binary data, which is not compatible with JSON. You can still use binary in your YAML files though - just store them without the `!!binary` tag and decode the base64 in your code (e.g. in the custom JSON methods `MarshalJSON` and `UnmarshalJSON`). This also has the benefit that your YAML and your JSON binary data will be decoded exactly the same way. As an example:
+
+```
+BAD:
+ exampleKey: !!binary gIGC
+
+GOOD:
+ exampleKey: gIGC
+... and decode the base64 data in your code.
+```
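+
+As a purely illustrative sketch of the decode-it-yourself approach from Caveat #1 (the `Doc` and `Secret` types below are made up for this example, not part of the library):
+
+```go
+package main
+
+import (
+    "encoding/base64"
+    "encoding/json"
+    "fmt"
+
+    "github.com/invopop/yaml"
+)
+
+// Secret stores raw bytes but accepts plain base64 text (no !!binary tag) in YAML.
+type Secret struct {
+    Data []byte
+}
+
+// UnmarshalJSON decodes the base64 payload handed over by the YAML-to-JSON step.
+func (s *Secret) UnmarshalJSON(b []byte) error {
+    var encoded string
+    if err := json.Unmarshal(b, &encoded); err != nil {
+        return err
+    }
+    raw, err := base64.StdEncoding.DecodeString(encoded)
+    if err != nil {
+        return err
+    }
+    s.Data = raw
+    return nil
+}
+
+type Doc struct {
+    ExampleKey Secret `json:"exampleKey"`
+}
+
+func main() {
+    var d Doc
+    if err := yaml.Unmarshal([]byte("exampleKey: gIGC\n"), &d); err != nil {
+        fmt.Printf("err: %v\n", err)
+        return
+    }
+    fmt.Println(len(d.ExampleKey.Data)) // 3
+}
+```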
+
+**Caveat #2:** When using `YAMLToJSON` directly, maps with keys that are maps will result in an error, since JSON does not support such keys. The same error occurs in `Unmarshal`, because struct fields can't act as map keys, so there is nothing to unmarshal those keys into.
+
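+This failure mode can be reproduced with a short, hypothetical snippet (the key literal is invented for illustration, not taken from this library's tests):
+
+```go
+package main
+
+import (
+    "fmt"
+
+    "github.com/invopop/yaml"
+)
+
+func main() {
+    // "? {a: 1}" declares a mapping as the key, which has no JSON equivalent.
+    y := []byte("? {a: 1}\n: value\n")
+    if _, err := yaml.YAMLToJSON(y); err != nil {
+        fmt.Printf("err: %v\n", err) // an error is expected for map-valued keys
+    }
+}
+```
+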
+## Installation and usage
+
+To install, run:
+
+```
+$ go get github.com/invopop/yaml
+```
+
+And import using:
+
+```
+import "github.com/invopop/yaml"
+```
+
+Usage is very similar to the JSON library:
+
+```go
+package main
+
+import (
+ "fmt"
+
+ "github.com/invopop/yaml"
+)
+
+type Person struct {
+ Name string `json:"name"` // Affects YAML field names too.
+ Age int `json:"age"`
+}
+
+func main() {
+ // Marshal a Person struct to YAML.
+ p := Person{"John", 30}
+ y, err := yaml.Marshal(p)
+ if err != nil {
+ fmt.Printf("err: %v\n", err)
+ return
+ }
+ fmt.Println(string(y))
+ /* Output:
+ age: 30
+ name: John
+ */
+
+ // Unmarshal the YAML back into a Person struct.
+ var p2 Person
+ err = yaml.Unmarshal(y, &p2)
+ if err != nil {
+ fmt.Printf("err: %v\n", err)
+ return
+ }
+ fmt.Println(p2)
+ /* Output:
+ {John 30}
+ */
+}
+```
+
+`yaml.YAMLToJSON` and `yaml.JSONToYAML` methods are also available:
+
+```go
+package main
+
+import (
+ "fmt"
+
+ "github.com/invopop/yaml"
+)
+
+func main() {
+ j := []byte(`{"name": "John", "age": 30}`)
+ y, err := yaml.JSONToYAML(j)
+ if err != nil {
+ fmt.Printf("err: %v\n", err)
+ return
+ }
+ fmt.Println(string(y))
+ /* Output:
+ name: John
+ age: 30
+ */
+ j2, err := yaml.YAMLToJSON(y)
+ if err != nil {
+ fmt.Printf("err: %v\n", err)
+ return
+ }
+ fmt.Println(string(j2))
+ /* Output:
+ {"age":30,"name":"John"}
+ */
+}
+```
diff --git a/vendor/github.com/invopop/yaml/fields.go b/vendor/github.com/invopop/yaml/fields.go
new file mode 100644
index 00000000..52b30c6b
--- /dev/null
+++ b/vendor/github.com/invopop/yaml/fields.go
@@ -0,0 +1,498 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package yaml
+
+import (
+ "bytes"
+ "encoding"
+ "encoding/json"
+ "reflect"
+ "sort"
+ "strings"
+ "sync"
+ "unicode"
+ "unicode/utf8"
+)
+
+// indirect walks down v allocating pointers as needed,
+// until it gets to a non-pointer.
+// if it encounters an Unmarshaler, indirect stops and returns that.
+// if decodingNull is true, indirect stops at the last pointer so it can be set to nil.
+func indirect(v reflect.Value, decodingNull bool) (json.Unmarshaler, encoding.TextUnmarshaler, reflect.Value) {
+ // If v is a named type and is addressable,
+ // start with its address, so that if the type has pointer methods,
+ // we find them.
+ if v.Kind() != reflect.Ptr && v.Type().Name() != "" && v.CanAddr() {
+ v = v.Addr()
+ }
+ for {
+ // Load value from interface, but only if the result will be
+ // usefully addressable.
+ if v.Kind() == reflect.Interface && !v.IsNil() {
+ e := v.Elem()
+ if e.Kind() == reflect.Ptr && !e.IsNil() && (!decodingNull || e.Elem().Kind() == reflect.Ptr) {
+ v = e
+ continue
+ }
+ }
+
+ if v.Kind() != reflect.Ptr {
+ break
+ }
+
+ if v.Elem().Kind() != reflect.Ptr && decodingNull && v.CanSet() {
+ break
+ }
+ if v.IsNil() {
+ if v.CanSet() {
+ v.Set(reflect.New(v.Type().Elem()))
+ } else {
+ v = reflect.New(v.Type().Elem())
+ }
+ }
+ if v.Type().NumMethod() > 0 {
+ if u, ok := v.Interface().(json.Unmarshaler); ok {
+ return u, nil, reflect.Value{}
+ }
+ if u, ok := v.Interface().(encoding.TextUnmarshaler); ok {
+ return nil, u, reflect.Value{}
+ }
+ }
+ v = v.Elem()
+ }
+ return nil, nil, v
+}
+
+// A field represents a single field found in a struct.
+type field struct {
+ name string
+ nameBytes []byte // []byte(name)
+ equalFold func(s, t []byte) bool // bytes.EqualFold or equivalent
+
+ tag bool
+ index []int
+ typ reflect.Type
+ omitEmpty bool
+ quoted bool
+}
+
+func fillField(f field) field {
+ f.nameBytes = []byte(f.name)
+ f.equalFold = foldFunc(f.nameBytes)
+ return f
+}
+
+// byName sorts field by name, breaking ties with depth,
+// then breaking ties with "name came from json tag", then
+// breaking ties with index sequence.
+type byName []field
+
+func (x byName) Len() int { return len(x) }
+
+func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+
+func (x byName) Less(i, j int) bool {
+ if x[i].name != x[j].name {
+ return x[i].name < x[j].name
+ }
+ if len(x[i].index) != len(x[j].index) {
+ return len(x[i].index) < len(x[j].index)
+ }
+ if x[i].tag != x[j].tag {
+ return x[i].tag
+ }
+ return byIndex(x).Less(i, j)
+}
+
+// byIndex sorts field by index sequence.
+type byIndex []field
+
+func (x byIndex) Len() int { return len(x) }
+
+func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+
+func (x byIndex) Less(i, j int) bool {
+ for k, xik := range x[i].index {
+ if k >= len(x[j].index) {
+ return false
+ }
+ if xik != x[j].index[k] {
+ return xik < x[j].index[k]
+ }
+ }
+ return len(x[i].index) < len(x[j].index)
+}
+
+// typeFields returns a list of fields that JSON should recognize for the given type.
+// The algorithm is breadth-first search over the set of structs to include - the top struct
+// and then any reachable anonymous structs.
+func typeFields(t reflect.Type) []field {
+ // Anonymous fields to explore at the current level and the next.
+ current := []field{}
+ next := []field{{typ: t}}
+
+ // Count of queued names for current level and the next.
+ var count, nextCount map[reflect.Type]int
+
+ // Types already visited at an earlier level.
+ visited := map[reflect.Type]bool{}
+
+ // Fields found.
+ var fields []field
+
+ for len(next) > 0 {
+ current, next = next, current[:0]
+ count, nextCount = nextCount, map[reflect.Type]int{}
+
+ for _, f := range current {
+ if visited[f.typ] {
+ continue
+ }
+ visited[f.typ] = true
+
+ // Scan f.typ for fields to include.
+ for i := 0; i < f.typ.NumField(); i++ {
+ sf := f.typ.Field(i)
+ if sf.PkgPath != "" { // unexported
+ continue
+ }
+ tag := sf.Tag.Get("json")
+ if tag == "-" {
+ continue
+ }
+ name, opts := parseTag(tag)
+ if !isValidTag(name) {
+ name = ""
+ }
+ index := make([]int, len(f.index)+1)
+ copy(index, f.index)
+ index[len(f.index)] = i
+
+ ft := sf.Type
+ if ft.Name() == "" && ft.Kind() == reflect.Ptr {
+ // Follow pointer.
+ ft = ft.Elem()
+ }
+
+ // Record found field and index sequence.
+ if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
+ tagged := name != ""
+ if name == "" {
+ name = sf.Name
+ }
+ fields = append(fields, fillField(field{
+ name: name,
+ tag: tagged,
+ index: index,
+ typ: ft,
+ omitEmpty: opts.Contains("omitempty"),
+ quoted: opts.Contains("string"),
+ }))
+ if count[f.typ] > 1 {
+ // If there were multiple instances, add a second,
+ // so that the annihilation code will see a duplicate.
+ // It only cares about the distinction between 1 or 2,
+ // so don't bother generating any more copies.
+ fields = append(fields, fields[len(fields)-1])
+ }
+ continue
+ }
+
+ // Record new anonymous struct to explore in next round.
+ nextCount[ft]++
+ if nextCount[ft] == 1 {
+ next = append(next, fillField(field{name: ft.Name(), index: index, typ: ft}))
+ }
+ }
+ }
+ }
+
+ sort.Sort(byName(fields))
+
+ // Delete all fields that are hidden by the Go rules for embedded fields,
+ // except that fields with JSON tags are promoted.
+
+ // The fields are sorted in primary order of name, secondary order
+ // of field index length. Loop over names; for each name, delete
+ // hidden fields by choosing the one dominant field that survives.
+ out := fields[:0]
+ for advance, i := 0, 0; i < len(fields); i += advance {
+ // One iteration per name.
+ // Find the sequence of fields with the name of this first field.
+ fi := fields[i]
+ name := fi.name
+ for advance = 1; i+advance < len(fields); advance++ {
+ fj := fields[i+advance]
+ if fj.name != name {
+ break
+ }
+ }
+ if advance == 1 { // Only one field with this name
+ out = append(out, fi)
+ continue
+ }
+ dominant, ok := dominantField(fields[i : i+advance])
+ if ok {
+ out = append(out, dominant)
+ }
+ }
+
+ fields = out
+ sort.Sort(byIndex(fields))
+
+ return fields
+}
+
+// dominantField looks through the fields, all of which are known to
+// have the same name, to find the single field that dominates the
+// others using Go's embedding rules, modified by the presence of
+// JSON tags. If there are multiple top-level fields, the boolean
+// will be false: This condition is an error in Go and we skip all
+// the fields.
+func dominantField(fields []field) (field, bool) {
+ // The fields are sorted in increasing index-length order. The winner
+ // must therefore be one with the shortest index length. Drop all
+ // longer entries, which is easy: just truncate the slice.
+ length := len(fields[0].index)
+ tagged := -1 // Index of first tagged field.
+ for i, f := range fields {
+ if len(f.index) > length {
+ fields = fields[:i]
+ break
+ }
+ if f.tag {
+ if tagged >= 0 {
+ // Multiple tagged fields at the same level: conflict.
+ // Return no field.
+ return field{}, false
+ }
+ tagged = i
+ }
+ }
+ if tagged >= 0 {
+ return fields[tagged], true
+ }
+ // All remaining fields have the same length. If there's more than one,
+ // we have a conflict (two fields named "X" at the same level) and we
+ // return no field.
+ if len(fields) > 1 {
+ return field{}, false
+ }
+ return fields[0], true
+}
+
+var fieldCache struct {
+ sync.RWMutex
+ m map[reflect.Type][]field
+}
+
+// cachedTypeFields is like typeFields but uses a cache to avoid repeated work.
+func cachedTypeFields(t reflect.Type) []field {
+ fieldCache.RLock()
+ f := fieldCache.m[t]
+ fieldCache.RUnlock()
+ if f != nil {
+ return f
+ }
+
+ // Compute fields without lock.
+ // Might duplicate effort but won't hold other computations back.
+ f = typeFields(t)
+ if f == nil {
+ f = []field{}
+ }
+
+ fieldCache.Lock()
+ if fieldCache.m == nil {
+ fieldCache.m = map[reflect.Type][]field{}
+ }
+ fieldCache.m[t] = f
+ fieldCache.Unlock()
+ return f
+}
+
+func isValidTag(s string) bool {
+ if s == "" {
+ return false
+ }
+ for _, c := range s {
+ switch {
+ case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c):
+ // Backslash and quote chars are reserved, but
+ // otherwise any punctuation chars are allowed
+ // in a tag name.
+ default:
+ if !unicode.IsLetter(c) && !unicode.IsDigit(c) {
+ return false
+ }
+ }
+ }
+ return true
+}
+
+const (
+ caseMask = ^byte(0x20) // Mask to ignore case in ASCII.
+ kelvin = '\u212a'
+ smallLongEss = '\u017f'
+)
+
+// foldFunc returns one of four different case folding equivalence
+// functions, from most general (and slow) to fastest:
+//
+// 1) bytes.EqualFold, if the key s contains any non-ASCII UTF-8
+// 2) equalFoldRight, if s contains special folding ASCII ('k', 'K', 's', 'S')
+// 3) asciiEqualFold, no special, but includes non-letters (including _)
+// 4) simpleLetterEqualFold, no specials, no non-letters.
+//
+// The letters S and K are special because they map to 3 runes, not just 2:
+// * S maps to s and to U+017F 'ſ' Latin small letter long s
+// * k maps to K and to U+212A 'K' Kelvin sign
+// See http://play.golang.org/p/tTxjOc0OGo
+//
+// The returned function is specialized for matching against s and
+// should only be given s. It's not curried for performance reasons.
+func foldFunc(s []byte) func(s, t []byte) bool {
+ nonLetter := false
+ special := false // special letter
+ for _, b := range s {
+ if b >= utf8.RuneSelf {
+ return bytes.EqualFold
+ }
+ upper := b & caseMask
+ if upper < 'A' || upper > 'Z' {
+ nonLetter = true
+ } else if upper == 'K' || upper == 'S' {
+ // See above for why these letters are special.
+ special = true
+ }
+ }
+ if special {
+ return equalFoldRight
+ }
+ if nonLetter {
+ return asciiEqualFold
+ }
+ return simpleLetterEqualFold
+}
+
+// equalFoldRight is a specialization of bytes.EqualFold when s is
+// known to be all ASCII (including punctuation), but contains an 's',
+// 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t.
+// See comments on foldFunc.
+func equalFoldRight(s, t []byte) bool {
+ for _, sb := range s {
+ if len(t) == 0 {
+ return false
+ }
+ tb := t[0]
+ if tb < utf8.RuneSelf {
+ if sb != tb {
+ sbUpper := sb & caseMask
+ if 'A' <= sbUpper && sbUpper <= 'Z' {
+ if sbUpper != tb&caseMask {
+ return false
+ }
+ } else {
+ return false
+ }
+ }
+ t = t[1:]
+ continue
+ }
+ // sb is ASCII and t is not. t must be either kelvin
+ // sign or long s; sb must be s, S, k, or K.
+ tr, size := utf8.DecodeRune(t)
+ switch sb {
+ case 's', 'S':
+ if tr != smallLongEss {
+ return false
+ }
+ case 'k', 'K':
+ if tr != kelvin {
+ return false
+ }
+ default:
+ return false
+ }
+ t = t[size:]
+
+ }
+ return len(t) <= 0
+}
+
+// asciiEqualFold is a specialization of bytes.EqualFold for use when
+// s is all ASCII (but may contain non-letters) and contains no
+// special-folding letters.
+// See comments on foldFunc.
+func asciiEqualFold(s, t []byte) bool {
+ if len(s) != len(t) {
+ return false
+ }
+ for i, sb := range s {
+ tb := t[i]
+ if sb == tb {
+ continue
+ }
+ if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') {
+ if sb&caseMask != tb&caseMask {
+ return false
+ }
+ } else {
+ return false
+ }
+ }
+ return true
+}
+
+// simpleLetterEqualFold is a specialization of bytes.EqualFold for
+// use when s is all ASCII letters (no underscores, etc) and also
+// doesn't contain 'k', 'K', 's', or 'S'.
+// See comments on foldFunc.
+func simpleLetterEqualFold(s, t []byte) bool {
+ if len(s) != len(t) {
+ return false
+ }
+ for i, b := range s {
+ if b&caseMask != t[i]&caseMask {
+ return false
+ }
+ }
+ return true
+}
+
+// tagOptions is the string following a comma in a struct field's "json"
+// tag, or the empty string. It does not include the leading comma.
+type tagOptions string
+
+// parseTag splits a struct field's json tag into its name and
+// comma-separated options.
+func parseTag(tag string) (string, tagOptions) {
+ if idx := strings.Index(tag, ","); idx != -1 {
+ return tag[:idx], tagOptions(tag[idx+1:])
+ }
+ return tag, tagOptions("")
+}
+
+// Contains reports whether a comma-separated list of options
+// contains a particular substr flag. substr must be surrounded by a
+// string boundary or commas.
+func (o tagOptions) Contains(optionName string) bool {
+ if len(o) == 0 {
+ return false
+ }
+ s := string(o)
+ for s != "" {
+ var next string
+ i := strings.Index(s, ",")
+ if i >= 0 {
+ s, next = s[:i], s[i+1:]
+ }
+ if s == optionName {
+ return true
+ }
+ s = next
+ }
+ return false
+}
diff --git a/vendor/github.com/invopop/yaml/yaml.go b/vendor/github.com/invopop/yaml/yaml.go
new file mode 100644
index 00000000..805d515d
--- /dev/null
+++ b/vendor/github.com/invopop/yaml/yaml.go
@@ -0,0 +1,314 @@
+// Package yaml provides a wrapper around go-yaml designed to enable a better
+// way of handling YAML when marshaling to and from structs.
+//
+// In short, this package first converts YAML to JSON using go-yaml and then
+// uses json.Marshal and json.Unmarshal to convert to or from the struct. This
+// means that it effectively reuses the JSON struct tags as well as the custom
+// JSON methods MarshalJSON and UnmarshalJSON, unlike go-yaml.
+//
+package yaml // import "github.com/invopop/yaml"
+
+import (
+ "bytes"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "reflect"
+ "strconv"
+
+ "gopkg.in/yaml.v3"
+)
+
+// Marshal the object into JSON then converts JSON to YAML and returns the
+// YAML.
+func Marshal(o interface{}) ([]byte, error) {
+ j, err := json.Marshal(o)
+ if err != nil {
+ return nil, fmt.Errorf("error marshaling into JSON: %v", err)
+ }
+
+ y, err := JSONToYAML(j)
+ if err != nil {
+ return nil, fmt.Errorf("error converting JSON to YAML: %v", err)
+ }
+
+ return y, nil
+}
+
+// JSONOpt is a decoding option for decoding from JSON format.
+type JSONOpt func(*json.Decoder) *json.Decoder
+
+// Unmarshal converts YAML to JSON then uses JSON to unmarshal into an object,
+// optionally configuring the behavior of the JSON unmarshal.
+func Unmarshal(y []byte, o interface{}, opts ...JSONOpt) error {
+ dec := yaml.NewDecoder(bytes.NewReader(y))
+ return unmarshal(dec, o, opts)
+}
+
+func unmarshal(dec *yaml.Decoder, o interface{}, opts []JSONOpt) error {
+ vo := reflect.ValueOf(o)
+ j, err := yamlToJSON(dec, &vo)
+ if err != nil {
+ return fmt.Errorf("error converting YAML to JSON: %v", err)
+ }
+
+ err = jsonUnmarshal(bytes.NewReader(j), o, opts...)
+ if err != nil {
+ return fmt.Errorf("error unmarshaling JSON: %v", err)
+ }
+
+ return nil
+}
+
+// jsonUnmarshal unmarshals the JSON byte stream from the given reader into the
+// object, optionally applying decoder options prior to decoding. We are not
+// using json.Unmarshal directly as we want the chance to pass in non-default
+// options.
+func jsonUnmarshal(r io.Reader, o interface{}, opts ...JSONOpt) error {
+ d := json.NewDecoder(r)
+ for _, opt := range opts {
+ d = opt(d)
+ }
+ if err := d.Decode(&o); err != nil {
+ return fmt.Errorf("while decoding JSON: %v", err)
+ }
+ return nil
+}
+
+// JSONToYAML converts JSON to YAML.
+func JSONToYAML(j []byte) ([]byte, error) {
+ // Convert the JSON to an object.
+ var jsonObj interface{}
+ // We are using yaml.Unmarshal here (instead of json.Unmarshal) because the
+ // Go JSON library doesn't try to pick the right number type (int, float,
+ // etc.) when unmarshalling to interface{}, it just picks float64
+ // universally. go-yaml does go through the effort of picking the right
+ // number type, so we can preserve number type throughout this process.
+ err := yaml.Unmarshal(j, &jsonObj)
+ if err != nil {
+ return nil, err
+ }
+
+ // Marshal this object into YAML.
+ return yaml.Marshal(jsonObj)
+}
+
+// YAMLToJSON converts YAML to JSON. Since JSON is a subset of YAML,
+// passing JSON through this method should be a no-op.
+//
+// Things YAML can do that are not supported by JSON:
+// * In YAML you can have binary and null keys in your maps. These are invalid
+// in JSON. (int and float keys are converted to strings.)
+// * Binary data in YAML with the !!binary tag is not supported. If you want to
+// use binary data with this library, encode the data as base64 as usual but do
+// not use the !!binary tag in your YAML. This will ensure the original base64
+// encoded data makes it all the way through to the JSON.
+//
+func YAMLToJSON(y []byte) ([]byte, error) { //nolint:revive
+ dec := yaml.NewDecoder(bytes.NewReader(y))
+ return yamlToJSON(dec, nil)
+}
+
+func yamlToJSON(dec *yaml.Decoder, jsonTarget *reflect.Value) ([]byte, error) {
+ // Convert the YAML to an object.
+ var yamlObj interface{}
+ if err := dec.Decode(&yamlObj); err != nil {
+ // Functionality changed in v3 which means we need to ignore EOF error.
+ // See https://github.com/go-yaml/yaml/issues/639
+ if !errors.Is(err, io.EOF) {
+ return nil, err
+ }
+ }
+
+ // YAML objects are not completely compatible with JSON objects (e.g. you
+ // can have non-string keys in YAML). So, convert the YAML-compatible object
+ // to a JSON-compatible object, failing with an error if irrecoverable
+ // incompatibilities happen along the way.
+ jsonObj, err := convertToJSONableObject(yamlObj, jsonTarget)
+ if err != nil {
+ return nil, err
+ }
+
+ // Convert this object to JSON and return the data.
+ return json.Marshal(jsonObj)
+}
+
+func convertToJSONableObject(yamlObj interface{}, jsonTarget *reflect.Value) (interface{}, error) { //nolint:gocyclo
+ var err error
+
+ // Resolve jsonTarget to a concrete value (i.e. not a pointer or an
+ // interface). We pass decodingNull as false because we're not actually
+ // decoding into the value, we're just checking if the ultimate target is a
+ // string.
+ if jsonTarget != nil {
+ ju, tu, pv := indirect(*jsonTarget, false)
+ // We have a JSON or Text Unmarshaler at this level, so we can't be trying
+ // to decode into a string.
+ if ju != nil || tu != nil {
+ jsonTarget = nil
+ } else {
+ jsonTarget = &pv
+ }
+ }
+
+ // go-yaml v3 changed from v2 and now will provide map[string]interface{} by
+ // default and map[interface{}]interface{} when the keys are not all strings.
+ // To get around this, we run a pre-loop to convert the map.
+ // JSON only supports strings as keys, so we must convert.
+
+ switch typedYAMLObj := yamlObj.(type) {
+ case map[interface{}]interface{}:
+ // From my reading of go-yaml v2 (specifically the resolve function),
+ // keys can only have the types string, int, int64, float64, binary
+ // (unsupported), or null (unsupported).
+ strMap := make(map[string]interface{})
+ for k, v := range typedYAMLObj {
+ // Resolve the key to a string first.
+ var keyString string
+ switch typedKey := k.(type) {
+ case string:
+ keyString = typedKey
+ case int:
+ keyString = strconv.Itoa(typedKey)
+ case int64:
+ // go-yaml will only return an int64 as a key if the system
+ // architecture is 32-bit and the key's value is between 32-bit
+ // and 64-bit. Otherwise the key type will simply be int.
+ keyString = strconv.FormatInt(typedKey, 10)
+ case float64:
+ // Float64 is now supported in keys
+ keyString = strconv.FormatFloat(typedKey, 'g', -1, 64)
+ case bool:
+ if typedKey {
+ keyString = "true"
+ } else {
+ keyString = "false"
+ }
+ default:
+ return nil, fmt.Errorf("unsupported map key of type: %s, key: %+#v, value: %+#v",
+ reflect.TypeOf(k), k, v)
+ }
+ strMap[keyString] = v
+ }
+ // replace yamlObj with our new string map
+ yamlObj = strMap
+ }
+
+ // If yamlObj is a number or a boolean, check if jsonTarget is a string -
+ // if so, coerce. Else return normal.
+ // If yamlObj is a map or array, find the field that each key is
+ // unmarshaling to, and when you recurse pass the reflect.Value for that
+ // field back into this function.
+ switch typedYAMLObj := yamlObj.(type) {
+ case map[string]interface{}:
+ for k, v := range typedYAMLObj {
+
+ // jsonTarget should be a struct or a map. If it's a struct, find
+ // the field it's going to map to and pass its reflect.Value. If
+ // it's a map, find the element type of the map and pass the
+ // reflect.Value created from that type. If it's neither, just pass
+ // nil - JSON conversion will error for us if it's a real issue.
+ if jsonTarget != nil {
+ t := *jsonTarget
+ if t.Kind() == reflect.Struct {
+ keyBytes := []byte(k)
+ // Find the field that the JSON library would use.
+ var f *field
+ fields := cachedTypeFields(t.Type())
+ for i := range fields {
+ ff := &fields[i]
+ if bytes.Equal(ff.nameBytes, keyBytes) {
+ f = ff
+ break
+ }
+ // Do case-insensitive comparison.
+ if f == nil && ff.equalFold(ff.nameBytes, keyBytes) {
+ f = ff
+ }
+ }
+ if f != nil {
+ // Find the reflect.Value of the most preferential
+ // struct field.
+ jtf := t.Field(f.index[0])
+ typedYAMLObj[k], err = convertToJSONableObject(v, &jtf)
+ if err != nil {
+ return nil, err
+ }
+ continue
+ }
+ } else if t.Kind() == reflect.Map {
+ // Create a zero value of the map's element type to use as
+ // the JSON target.
+ jtv := reflect.Zero(t.Type().Elem())
+ typedYAMLObj[k], err = convertToJSONableObject(v, &jtv)
+ if err != nil {
+ return nil, err
+ }
+ continue
+ }
+ }
+ typedYAMLObj[k], err = convertToJSONableObject(v, nil)
+ if err != nil {
+ return nil, err
+ }
+ }
+ return typedYAMLObj, nil
+ case []interface{}:
+ // We need to recurse into arrays in case there are any
+ // map[interface{}]interface{}'s inside and to convert any
+ // numbers to strings.
+
+ // If jsonTarget is a slice (which it really should be), find the
+ // thing it's going to map to. If it's not a slice, just pass nil
+ // - JSON conversion will error for us if it's a real issue.
+ var jsonSliceElemValue *reflect.Value
+ if jsonTarget != nil {
+ t := *jsonTarget
+ if t.Kind() == reflect.Slice {
+ // By default slices point to nil, but we need a reflect.Value
+ // pointing to a value of the slice type, so we create one here.
+ ev := reflect.Indirect(reflect.New(t.Type().Elem()))
+ jsonSliceElemValue = &ev
+ }
+ }
+
+ // Make and use a new array.
+ arr := make([]interface{}, len(typedYAMLObj))
+ for i, v := range typedYAMLObj {
+ arr[i], err = convertToJSONableObject(v, jsonSliceElemValue)
+ if err != nil {
+ return nil, err
+ }
+ }
+ return arr, nil
+ default:
+ // If the target type is a string and the YAML type is a number,
+ // convert the YAML type to a string.
+ if jsonTarget != nil && (*jsonTarget).Kind() == reflect.String {
+ // Based on my reading of go-yaml, it may return int, int64,
+ // float64, or uint64.
+ var s string
+ switch typedVal := typedYAMLObj.(type) {
+ case int:
+ s = strconv.FormatInt(int64(typedVal), 10)
+ case int64:
+ s = strconv.FormatInt(typedVal, 10)
+ case float64:
+ s = strconv.FormatFloat(typedVal, 'g', -1, 64)
+ case uint64:
+ s = strconv.FormatUint(typedVal, 10)
+ case bool:
+ if typedVal {
+ s = "true"
+ } else {
+ s = "false"
+ }
+ }
+ if len(s) > 0 {
+ yamlObj = interface{}(s)
+ }
+ }
+ return yamlObj, nil
+ }
+}
diff --git a/vendor/github.com/josharian/intern/README.md b/vendor/github.com/josharian/intern/README.md
new file mode 100644
index 00000000..ffc44b21
--- /dev/null
+++ b/vendor/github.com/josharian/intern/README.md
@@ -0,0 +1,5 @@
+Docs: https://godoc.org/github.com/josharian/intern
+
+See also [Go issue 5160](https://golang.org/issue/5160).
+
+License: MIT
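+
+A minimal, illustrative usage sketch (it relies only on the `String` and `Bytes` functions defined in `intern.go`):
+
+```go
+package main
+
+import (
+    "fmt"
+
+    "github.com/josharian/intern"
+)
+
+func main() {
+    // Both calls return an equal string; when the pooled entry is reused they
+    // may also share backing memory, but interning is best effort only.
+    a := intern.String("hello")
+    b := intern.Bytes([]byte("hello"))
+    fmt.Println(a == b) // true
+}
+```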
diff --git a/vendor/github.com/josharian/intern/intern.go b/vendor/github.com/josharian/intern/intern.go
new file mode 100644
index 00000000..7acb1fe9
--- /dev/null
+++ b/vendor/github.com/josharian/intern/intern.go
@@ -0,0 +1,44 @@
+// Package intern interns strings.
+// Interning is best effort only.
+// Interned strings may be removed automatically
+// at any time without notification.
+// All functions may be called concurrently
+// with themselves and each other.
+package intern
+
+import "sync"
+
+var (
+ pool sync.Pool = sync.Pool{
+ New: func() interface{} {
+ return make(map[string]string)
+ },
+ }
+)
+
+// String returns s, interned.
+func String(s string) string {
+ m := pool.Get().(map[string]string)
+ c, ok := m[s]
+ if ok {
+ pool.Put(m)
+ return c
+ }
+ m[s] = s
+ pool.Put(m)
+ return s
+}
+
+// Bytes returns b converted to a string, interned.
+func Bytes(b []byte) string {
+ m := pool.Get().(map[string]string)
+ c, ok := m[string(b)]
+ if ok {
+ pool.Put(m)
+ return c
+ }
+ s := string(b)
+ m[s] = s
+ pool.Put(m)
+ return s
+}
diff --git a/vendor/github.com/josharian/intern/license.md b/vendor/github.com/josharian/intern/license.md
new file mode 100644
index 00000000..353d3055
--- /dev/null
+++ b/vendor/github.com/josharian/intern/license.md
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 Josh Bleecher Snyder
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/mailru/easyjson/LICENSE b/vendor/github.com/mailru/easyjson/LICENSE
new file mode 100644
index 00000000..fbff658f
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/LICENSE
@@ -0,0 +1,7 @@
+Copyright (c) 2016 Mail.Ru Group
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/mailru/easyjson/buffer/pool.go b/vendor/github.com/mailru/easyjson/buffer/pool.go
new file mode 100644
index 00000000..598a54af
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/buffer/pool.go
@@ -0,0 +1,278 @@
+// Package buffer implements a buffer for serialization, consisting of a chain of []byte-s to
+// reduce copying and to allow reuse of individual chunks.
+package buffer
+
+import (
+ "io"
+ "net"
+ "sync"
+)
+
+// PoolConfig contains configuration for the allocation and reuse strategy.
+type PoolConfig struct {
+ StartSize int // Minimum chunk size that is allocated.
+ PooledSize int // Minimum chunk size that is reused, reusing chunks too small will result in overhead.
+ MaxSize int // Maximum chunk size that will be allocated.
+}
+
+var config = PoolConfig{
+ StartSize: 128,
+ PooledSize: 512,
+ MaxSize: 32768,
+}
+
+// Reuse pool: chunk size -> pool.
+var buffers = map[int]*sync.Pool{}
+
+func initBuffers() {
+ for l := config.PooledSize; l <= config.MaxSize; l *= 2 {
+ buffers[l] = new(sync.Pool)
+ }
+}
+
+func init() {
+ initBuffers()
+}
+
+// Init sets up a non-default pooling and allocation strategy. Should be run before serialization is done.
+func Init(cfg PoolConfig) {
+ config = cfg
+ initBuffers()
+}
+
+// putBuf puts a chunk to reuse pool if it can be reused.
+func putBuf(buf []byte) {
+ size := cap(buf)
+ if size < config.PooledSize {
+ return
+ }
+ if c := buffers[size]; c != nil {
+ c.Put(buf[:0])
+ }
+}
+
+// getBuf gets a chunk from reuse pool or creates a new one if reuse failed.
+func getBuf(size int) []byte {
+ if size >= config.PooledSize {
+ if c := buffers[size]; c != nil {
+ v := c.Get()
+ if v != nil {
+ return v.([]byte)
+ }
+ }
+ }
+ return make([]byte, 0, size)
+}
+
+// Buffer is a buffer optimized for serialization without extra copying.
+type Buffer struct {
+
+ // Buf is the current chunk that can be used for serialization.
+ Buf []byte
+
+ toPool []byte
+ bufs [][]byte
+}
+
+// EnsureSpace makes sure that the current chunk contains at least s free bytes,
+// possibly creating a new chunk.
+func (b *Buffer) EnsureSpace(s int) {
+ if cap(b.Buf)-len(b.Buf) < s {
+ b.ensureSpaceSlow(s)
+ }
+}
+
+func (b *Buffer) ensureSpaceSlow(s int) {
+ l := len(b.Buf)
+ if l > 0 {
+ if cap(b.toPool) != cap(b.Buf) {
+ // Chunk was reallocated, toPool can be pooled.
+ putBuf(b.toPool)
+ }
+ if cap(b.bufs) == 0 {
+ b.bufs = make([][]byte, 0, 8)
+ }
+ b.bufs = append(b.bufs, b.Buf)
+ l = cap(b.toPool) * 2
+ } else {
+ l = config.StartSize
+ }
+
+ if l > config.MaxSize {
+ l = config.MaxSize
+ }
+ b.Buf = getBuf(l)
+ b.toPool = b.Buf
+}
+
+// AppendByte appends a single byte to buffer.
+func (b *Buffer) AppendByte(data byte) {
+ b.EnsureSpace(1)
+ b.Buf = append(b.Buf, data)
+}
+
+// AppendBytes appends a byte slice to buffer.
+func (b *Buffer) AppendBytes(data []byte) {
+ if len(data) <= cap(b.Buf)-len(b.Buf) {
+ b.Buf = append(b.Buf, data...) // fast path
+ } else {
+ b.appendBytesSlow(data)
+ }
+}
+
+func (b *Buffer) appendBytesSlow(data []byte) {
+ for len(data) > 0 {
+ b.EnsureSpace(1)
+
+ sz := cap(b.Buf) - len(b.Buf)
+ if sz > len(data) {
+ sz = len(data)
+ }
+
+ b.Buf = append(b.Buf, data[:sz]...)
+ data = data[sz:]
+ }
+}
+
+// AppendString appends a string to buffer.
+func (b *Buffer) AppendString(data string) {
+ if len(data) <= cap(b.Buf)-len(b.Buf) {
+ b.Buf = append(b.Buf, data...) // fast path
+ } else {
+ b.appendStringSlow(data)
+ }
+}
+
+func (b *Buffer) appendStringSlow(data string) {
+ for len(data) > 0 {
+ b.EnsureSpace(1)
+
+ sz := cap(b.Buf) - len(b.Buf)
+ if sz > len(data) {
+ sz = len(data)
+ }
+
+ b.Buf = append(b.Buf, data[:sz]...)
+ data = data[sz:]
+ }
+}
+
+// Size computes the size of a buffer by adding sizes of every chunk.
+func (b *Buffer) Size() int {
+ size := len(b.Buf)
+ for _, buf := range b.bufs {
+ size += len(buf)
+ }
+ return size
+}
+
+// DumpTo outputs the contents of a buffer to a writer and resets the buffer.
+func (b *Buffer) DumpTo(w io.Writer) (written int, err error) {
+ bufs := net.Buffers(b.bufs)
+ if len(b.Buf) > 0 {
+ bufs = append(bufs, b.Buf)
+ }
+ n, err := bufs.WriteTo(w)
+
+ for _, buf := range b.bufs {
+ putBuf(buf)
+ }
+ putBuf(b.toPool)
+
+ b.bufs = nil
+ b.Buf = nil
+ b.toPool = nil
+
+ return int(n), err
+}
+
+// BuildBytes creates a single byte slice with all the contents of the buffer. Data is
+// copied if it does not fit in a single chunk. You can optionally provide one byte
+// slice as an argument for it to try to reuse.
+func (b *Buffer) BuildBytes(reuse ...[]byte) []byte {
+ if len(b.bufs) == 0 {
+ ret := b.Buf
+ b.toPool = nil
+ b.Buf = nil
+ return ret
+ }
+
+ var ret []byte
+ size := b.Size()
+
+ // If we got a buffer as argument and it is big enough, reuse it.
+ if len(reuse) == 1 && cap(reuse[0]) >= size {
+ ret = reuse[0][:0]
+ } else {
+ ret = make([]byte, 0, size)
+ }
+ for _, buf := range b.bufs {
+ ret = append(ret, buf...)
+ putBuf(buf)
+ }
+
+ ret = append(ret, b.Buf...)
+ putBuf(b.toPool)
+
+ b.bufs = nil
+ b.toPool = nil
+ b.Buf = nil
+
+ return ret
+}
+
+type readCloser struct {
+ offset int
+ bufs [][]byte
+}
+
+func (r *readCloser) Read(p []byte) (n int, err error) {
+ for _, buf := range r.bufs {
+ // Copy as much as we can.
+ x := copy(p[n:], buf[r.offset:])
+ n += x // Increment how much we filled.
+
+ // Did we empty the whole buffer?
+ if r.offset+x == len(buf) {
+ // On to the next buffer.
+ r.offset = 0
+ r.bufs = r.bufs[1:]
+
+ // We can release this buffer.
+ putBuf(buf)
+ } else {
+ r.offset += x
+ }
+
+ if n == len(p) {
+ break
+ }
+ }
+ // No buffers left or nothing read?
+ if len(r.bufs) == 0 {
+ err = io.EOF
+ }
+ return
+}
+
+func (r *readCloser) Close() error {
+ // Release all remaining buffers.
+ for _, buf := range r.bufs {
+ putBuf(buf)
+ }
+ // In case Close gets called multiple times.
+ r.bufs = nil
+
+ return nil
+}
+
+// ReadCloser creates an io.ReadCloser with all the contents of the buffer.
+func (b *Buffer) ReadCloser() io.ReadCloser {
+ ret := &readCloser{0, append(b.bufs, b.Buf)}
+
+ b.bufs = nil
+ b.toPool = nil
+ b.Buf = nil
+
+ return ret
+}
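+
+// Usage sketch (illustrative, not part of the original file): append data and
+// collect the result as a single slice; intermediate chunks are returned to the pool.
+//
+//	var b Buffer
+//	b.AppendString("hello, ")
+//	b.AppendString("world")
+//	out := b.BuildBytes() // out == []byte("hello, world")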
diff --git a/vendor/github.com/mailru/easyjson/jlexer/bytestostr.go b/vendor/github.com/mailru/easyjson/jlexer/bytestostr.go
new file mode 100644
index 00000000..ff7b27c5
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/jlexer/bytestostr.go
@@ -0,0 +1,24 @@
+// This file will only be included in the build if neither the
+// easyjson_nounsafe nor the appengine build tag is set. See README notes
+// for more details.
+
+//+build !easyjson_nounsafe
+//+build !appengine
+
+package jlexer
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+// bytesToStr creates a string pointing at the slice to avoid copying.
+//
+// Warning: the string returned by the function should be used with care, as the whole input data
+// chunk may be kept from being freed by the GC because of a single string, or buffer.Data
+// may be garbage-collected even while the string still exists.
+func bytesToStr(data []byte) string {
+ h := (*reflect.SliceHeader)(unsafe.Pointer(&data))
+ shdr := reflect.StringHeader{Data: h.Data, Len: h.Len}
+ return *(*string)(unsafe.Pointer(&shdr))
+}
diff --git a/vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go b/vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go
new file mode 100644
index 00000000..864d1be6
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go
@@ -0,0 +1,13 @@
+// This file is included in the build if any of the build tags below
+// are defined. Refer to README notes for more details.
+
+//+build easyjson_nounsafe appengine
+
+package jlexer
+
+// bytesToStr creates a string normally from []byte
+//
+// Note that this method is roughly 1.5x slower than using the 'unsafe' method.
+func bytesToStr(data []byte) string {
+ return string(data)
+}
diff --git a/vendor/github.com/mailru/easyjson/jlexer/error.go b/vendor/github.com/mailru/easyjson/jlexer/error.go
new file mode 100644
index 00000000..e90ec40d
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/jlexer/error.go
@@ -0,0 +1,15 @@
+package jlexer
+
+import "fmt"
+
+// LexerError implements the error interface and represents all possible errors that can be
+// generated while parsing JSON data.
+type LexerError struct {
+ Reason string
+ Offset int
+ Data string
+}
+
+func (l *LexerError) Error() string {
+ return fmt.Sprintf("parse error: %s near offset %d of '%s'", l.Reason, l.Offset, l.Data)
+}
diff --git a/vendor/github.com/mailru/easyjson/jlexer/lexer.go b/vendor/github.com/mailru/easyjson/jlexer/lexer.go
new file mode 100644
index 00000000..b5f5e261
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/jlexer/lexer.go
@@ -0,0 +1,1244 @@
+// Package jlexer contains a JSON lexer implementation.
+//
+// It is expected that it is mostly used with generated parser code, so the interface is tuned
+// for a parser that knows what kind of data is expected.
+package jlexer
+
+import (
+ "bytes"
+ "encoding/base64"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "strconv"
+ "unicode"
+ "unicode/utf16"
+ "unicode/utf8"
+
+ "github.com/josharian/intern"
+)
+
+// tokenKind determines type of a token.
+type tokenKind byte
+
+const (
+ tokenUndef tokenKind = iota // No token.
+ tokenDelim // Delimiter: one of '{', '}', '[' or ']'.
+ tokenString // A string literal, e.g. "abc\u1234"
+ tokenNumber // Number literal, e.g. 1.5e5
+ tokenBool // Boolean literal: true or false.
+ tokenNull // null keyword.
+)
+
+// token describes a single token: type, position in the input and value.
+type token struct {
+ kind tokenKind // Type of a token.
+
+ boolValue bool // Value if a boolean literal token.
+ byteValueCloned bool // true if byteValue was allocated and does not refer to original json body
+ byteValue []byte // Raw value of a token.
+ delimValue byte
+}
+
+// Lexer is a JSON lexer: it iterates over JSON tokens in a byte slice.
+type Lexer struct {
+ Data []byte // Input data given to the lexer.
+
+ start int // Start of the current token.
+ pos int // Current unscanned position in the input stream.
+ token token // Last scanned token, if token.kind != tokenUndef.
+
+ firstElement bool // Whether the current element is the first in an array or an object.
+ wantSep byte // A comma or a colon character, which needs to occur before a token.
+
+ UseMultipleErrors bool // If we want to use multiple errors.
+ fatalError error // Fatal error that occurred during lexing. It is usually a syntax error.
+ multipleErrors []*LexerError // Semantic errors that occurred during lexing. Parsing continues after these errors are found.
+}
+
+// FetchToken scans the input for the next token.
+func (r *Lexer) FetchToken() {
+ r.token.kind = tokenUndef
+ r.start = r.pos
+
+ // Check if r.Data has an r.pos element.
+ // If it doesn't, it means the input data is corrupted.
+ if len(r.Data) < r.pos {
+ r.errParse("Unexpected end of data")
+ return
+ }
+ // Determine the type of a token by skipping whitespace and reading the
+ // first character.
+ for _, c := range r.Data[r.pos:] {
+ switch c {
+ case ':', ',':
+ if r.wantSep == c {
+ r.pos++
+ r.start++
+ r.wantSep = 0
+ } else {
+ r.errSyntax()
+ }
+
+ case ' ', '\t', '\r', '\n':
+ r.pos++
+ r.start++
+
+ case '"':
+ if r.wantSep != 0 {
+ r.errSyntax()
+ }
+
+ r.token.kind = tokenString
+ r.fetchString()
+ return
+
+ case '{', '[':
+ if r.wantSep != 0 {
+ r.errSyntax()
+ }
+ r.firstElement = true
+ r.token.kind = tokenDelim
+ r.token.delimValue = r.Data[r.pos]
+ r.pos++
+ return
+
+ case '}', ']':
+ if !r.firstElement && (r.wantSep != ',') {
+ r.errSyntax()
+ }
+ r.wantSep = 0
+ r.token.kind = tokenDelim
+ r.token.delimValue = r.Data[r.pos]
+ r.pos++
+ return
+
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-':
+ if r.wantSep != 0 {
+ r.errSyntax()
+ }
+ r.token.kind = tokenNumber
+ r.fetchNumber()
+ return
+
+ case 'n':
+ if r.wantSep != 0 {
+ r.errSyntax()
+ }
+
+ r.token.kind = tokenNull
+ r.fetchNull()
+ return
+
+ case 't':
+ if r.wantSep != 0 {
+ r.errSyntax()
+ }
+
+ r.token.kind = tokenBool
+ r.token.boolValue = true
+ r.fetchTrue()
+ return
+
+ case 'f':
+ if r.wantSep != 0 {
+ r.errSyntax()
+ }
+
+ r.token.kind = tokenBool
+ r.token.boolValue = false
+ r.fetchFalse()
+ return
+
+ default:
+ r.errSyntax()
+ return
+ }
+ }
+ r.fatalError = io.EOF
+ return
+}
+
+// isTokenEnd returns true if the char can follow a non-delimiter token
+func isTokenEnd(c byte) bool {
+ return c == ' ' || c == '\t' || c == '\r' || c == '\n' || c == '[' || c == ']' || c == '{' || c == '}' || c == ',' || c == ':'
+}
+
+// fetchNull fetches and checks remaining bytes of null keyword.
+func (r *Lexer) fetchNull() {
+ r.pos += 4
+ if r.pos > len(r.Data) ||
+ r.Data[r.pos-3] != 'u' ||
+ r.Data[r.pos-2] != 'l' ||
+ r.Data[r.pos-1] != 'l' ||
+ (r.pos != len(r.Data) && !isTokenEnd(r.Data[r.pos])) {
+
+ r.pos -= 4
+ r.errSyntax()
+ }
+}
+
+// fetchTrue fetches and checks remaining bytes of true keyword.
+func (r *Lexer) fetchTrue() {
+ r.pos += 4
+ if r.pos > len(r.Data) ||
+ r.Data[r.pos-3] != 'r' ||
+ r.Data[r.pos-2] != 'u' ||
+ r.Data[r.pos-1] != 'e' ||
+ (r.pos != len(r.Data) && !isTokenEnd(r.Data[r.pos])) {
+
+ r.pos -= 4
+ r.errSyntax()
+ }
+}
+
+// fetchFalse fetches and checks remaining bytes of false keyword.
+func (r *Lexer) fetchFalse() {
+ r.pos += 5
+ if r.pos > len(r.Data) ||
+ r.Data[r.pos-4] != 'a' ||
+ r.Data[r.pos-3] != 'l' ||
+ r.Data[r.pos-2] != 's' ||
+ r.Data[r.pos-1] != 'e' ||
+ (r.pos != len(r.Data) && !isTokenEnd(r.Data[r.pos])) {
+
+ r.pos -= 5
+ r.errSyntax()
+ }
+}
+
+// fetchNumber scans a number literal token.
+func (r *Lexer) fetchNumber() {
+ hasE := false
+ afterE := false
+ hasDot := false
+
+ r.pos++
+ for i, c := range r.Data[r.pos:] {
+ switch {
+ case c >= '0' && c <= '9':
+ afterE = false
+ case c == '.' && !hasDot:
+ hasDot = true
+ case (c == 'e' || c == 'E') && !hasE:
+ hasE = true
+ hasDot = true
+ afterE = true
+ case (c == '+' || c == '-') && afterE:
+ afterE = false
+ default:
+ r.pos += i
+ if !isTokenEnd(c) {
+ r.errSyntax()
+ } else {
+ r.token.byteValue = r.Data[r.start:r.pos]
+ }
+ return
+ }
+ }
+
+ r.pos = len(r.Data)
+ r.token.byteValue = r.Data[r.start:]
+}
+
+// findStringLen tries to scan the string literal for the ending quote char to determine the required size.
+// The size will be exact if no escapes are present and may be inexact if there are escaped chars.
+func findStringLen(data []byte) (isValid bool, length int) {
+ for {
+ idx := bytes.IndexByte(data, '"')
+ if idx == -1 {
+ return false, len(data)
+ }
+ if idx == 0 || (idx > 0 && data[idx-1] != '\\') {
+ return true, length + idx
+ }
+
+ // Count the preceding backslashes: an even number of slashes means the quote is not really escaped.
+ cnt := 1
+ for idx-cnt-1 >= 0 && data[idx-cnt-1] == '\\' {
+ cnt++
+ }
+ if cnt%2 == 0 {
+ return true, length + idx
+ }
+
+ length += idx + 1
+ data = data[idx+1:]
+ }
+}
+
+// unescapeStringToken performs unescaping of a string token.
+// If no escaping is needed, the original string is kept; otherwise a new one is allocated.
+func (r *Lexer) unescapeStringToken() (err error) {
+ data := r.token.byteValue
+ var unescapedData []byte
+
+ for {
+ i := bytes.IndexByte(data, '\\')
+ if i == -1 {
+ break
+ }
+
+ escapedRune, escapedBytes, err := decodeEscape(data[i:])
+ if err != nil {
+ r.errParse(err.Error())
+ return err
+ }
+
+ if unescapedData == nil {
+ unescapedData = make([]byte, 0, len(r.token.byteValue))
+ }
+
+ var d [4]byte
+ s := utf8.EncodeRune(d[:], escapedRune)
+ unescapedData = append(unescapedData, data[:i]...)
+ unescapedData = append(unescapedData, d[:s]...)
+
+ data = data[i+escapedBytes:]
+ }
+
+ if unescapedData != nil {
+ r.token.byteValue = append(unescapedData, data...)
+ r.token.byteValueCloned = true
+ }
+ return
+}
+
+// getu4 decodes \uXXXX from the beginning of s, returning the hex value,
+// or it returns -1.
+func getu4(s []byte) rune {
+ if len(s) < 6 || s[0] != '\\' || s[1] != 'u' {
+ return -1
+ }
+ var val rune
+ for i := 2; i < len(s) && i < 6; i++ {
+ var v byte
+ c := s[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ v = c - '0'
+ case 'a', 'b', 'c', 'd', 'e', 'f':
+ v = c - 'a' + 10
+ case 'A', 'B', 'C', 'D', 'E', 'F':
+ v = c - 'A' + 10
+ default:
+ return -1
+ }
+
+ val <<= 4
+ val |= rune(v)
+ }
+ return val
+}
+
+// decodeEscape processes a single escape sequence and returns the number of bytes processed.
+func decodeEscape(data []byte) (decoded rune, bytesProcessed int, err error) {
+ if len(data) < 2 {
+ return 0, 0, errors.New("incorrect escape symbol \\ at the end of token")
+ }
+
+ c := data[1]
+ switch c {
+ case '"', '/', '\\':
+ return rune(c), 2, nil
+ case 'b':
+ return '\b', 2, nil
+ case 'f':
+ return '\f', 2, nil
+ case 'n':
+ return '\n', 2, nil
+ case 'r':
+ return '\r', 2, nil
+ case 't':
+ return '\t', 2, nil
+ case 'u':
+ rr := getu4(data)
+ if rr < 0 {
+ return 0, 0, errors.New("incorrectly escaped \\uXXXX sequence")
+ }
+
+ read := 6
+ if utf16.IsSurrogate(rr) {
+ rr1 := getu4(data[read:])
+ if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar {
+ read += 6
+ rr = dec
+ } else {
+ rr = unicode.ReplacementChar
+ }
+ }
+ return rr, read, nil
+ }
+
+ return 0, 0, errors.New("incorrectly escaped bytes")
+}
+
+// fetchString scans a string literal token.
+func (r *Lexer) fetchString() {
+ r.pos++
+ data := r.Data[r.pos:]
+
+ isValid, length := findStringLen(data)
+ if !isValid {
+ r.pos += length
+ r.errParse("unterminated string literal")
+ return
+ }
+ r.token.byteValue = data[:length]
+ r.pos += length + 1 // skip closing '"' as well
+}
+
+// scanToken scans the next token if no token is currently available in the lexer.
+func (r *Lexer) scanToken() {
+ if r.token.kind != tokenUndef || r.fatalError != nil {
+ return
+ }
+
+ r.FetchToken()
+}
+
+// consume resets the current token to allow scanning the next one.
+func (r *Lexer) consume() {
+ r.token.kind = tokenUndef
+ r.token.byteValueCloned = false
+ r.token.delimValue = 0
+}
+
+// Ok returns true if no error (including io.EOF) was encountered during scanning.
+func (r *Lexer) Ok() bool {
+ return r.fatalError == nil
+}
+
+const maxErrorContextLen = 13
+
+func (r *Lexer) errParse(what string) {
+ if r.fatalError == nil {
+ var str string
+ if len(r.Data)-r.pos <= maxErrorContextLen {
+ str = string(r.Data)
+ } else {
+ str = string(r.Data[r.pos:r.pos+maxErrorContextLen-3]) + "..."
+ }
+ r.fatalError = &LexerError{
+ Reason: what,
+ Offset: r.pos,
+ Data: str,
+ }
+ }
+}
+
+func (r *Lexer) errSyntax() {
+ r.errParse("syntax error")
+}
+
+func (r *Lexer) errInvalidToken(expected string) {
+ if r.fatalError != nil {
+ return
+ }
+ if r.UseMultipleErrors {
+ r.pos = r.start
+ r.consume()
+ r.SkipRecursive()
+ switch expected {
+ case "[":
+ r.token.delimValue = ']'
+ r.token.kind = tokenDelim
+ case "{":
+ r.token.delimValue = '}'
+ r.token.kind = tokenDelim
+ }
+ r.addNonfatalError(&LexerError{
+ Reason: fmt.Sprintf("expected %s", expected),
+ Offset: r.start,
+ Data: string(r.Data[r.start:r.pos]),
+ })
+ return
+ }
+
+ var str string
+ if len(r.token.byteValue) <= maxErrorContextLen {
+ str = string(r.token.byteValue)
+ } else {
+ str = string(r.token.byteValue[:maxErrorContextLen-3]) + "..."
+ }
+ r.fatalError = &LexerError{
+ Reason: fmt.Sprintf("expected %s", expected),
+ Offset: r.pos,
+ Data: str,
+ }
+}
+
+func (r *Lexer) GetPos() int {
+ return r.pos
+}
+
+// Delim consumes a token and verifies that it is the given delimiter.
+func (r *Lexer) Delim(c byte) {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+
+ if !r.Ok() || r.token.delimValue != c {
+ r.consume() // errInvalidToken can change token if UseMultipleErrors is enabled.
+ r.errInvalidToken(string([]byte{c}))
+ } else {
+ r.consume()
+ }
+}
+
+// IsDelim returns true if there was no scanning error and next token is the given delimiter.
+func (r *Lexer) IsDelim(c byte) bool {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ return !r.Ok() || r.token.delimValue == c
+}
+
+// Null verifies that the next token is null and consumes it.
+func (r *Lexer) Null() {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenNull {
+ r.errInvalidToken("null")
+ }
+ r.consume()
+}
+
+// IsNull returns true if the next token is a null keyword.
+func (r *Lexer) IsNull() bool {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ return r.Ok() && r.token.kind == tokenNull
+}
+
+// Skip skips a single token.
+func (r *Lexer) Skip() {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ r.consume()
+}
+
+// SkipRecursive skips next array or object completely, or just skips a single token if not
+// an array/object.
+//
+// Note: no syntax validation is performed on the skipped data.
+func (r *Lexer) SkipRecursive() {
+ r.scanToken()
+ var start, end byte
+ startPos := r.start
+
+ switch r.token.delimValue {
+ case '{':
+ start, end = '{', '}'
+ case '[':
+ start, end = '[', ']'
+ default:
+ r.consume()
+ return
+ }
+
+ r.consume()
+
+ level := 1
+ inQuotes := false
+ wasEscape := false
+
+ for i, c := range r.Data[r.pos:] {
+ switch {
+ case c == start && !inQuotes:
+ level++
+ case c == end && !inQuotes:
+ level--
+ if level == 0 {
+ r.pos += i + 1
+ if !json.Valid(r.Data[startPos:r.pos]) {
+ r.pos = len(r.Data)
+ r.fatalError = &LexerError{
+ Reason: "skipped array/object json value is invalid",
+ Offset: r.pos,
+ Data: string(r.Data[r.pos:]),
+ }
+ }
+ return
+ }
+ case c == '\\' && inQuotes:
+ wasEscape = !wasEscape
+ continue
+ case c == '"' && inQuotes:
+ inQuotes = wasEscape
+ case c == '"':
+ inQuotes = true
+ }
+ wasEscape = false
+ }
+ r.pos = len(r.Data)
+ r.fatalError = &LexerError{
+ Reason: "EOF reached while skipping array/object or token",
+ Offset: r.pos,
+ Data: string(r.Data[r.pos:]),
+ }
+}
+
+// Raw fetches the next item recursively as a data slice
+func (r *Lexer) Raw() []byte {
+ r.SkipRecursive()
+ if !r.Ok() {
+ return nil
+ }
+ return r.Data[r.start:r.pos]
+}
+
+// IsStart returns whether the lexer is positioned at the start
+// of an input string.
+func (r *Lexer) IsStart() bool {
+ return r.pos == 0
+}
+
+// Consumed reads all remaining bytes from the input, publishing an error if
+// there is anything but whitespace remaining.
+func (r *Lexer) Consumed() {
+ if r.pos > len(r.Data) || !r.Ok() {
+ return
+ }
+
+ for _, c := range r.Data[r.pos:] {
+ if c != ' ' && c != '\t' && c != '\r' && c != '\n' {
+ r.AddError(&LexerError{
+ Reason: "invalid character '" + string(c) + "' after top-level value",
+ Offset: r.pos,
+ Data: string(r.Data[r.pos:]),
+ })
+ return
+ }
+
+ r.pos++
+ r.start++
+ }
+}
+
+func (r *Lexer) unsafeString(skipUnescape bool) (string, []byte) {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenString {
+ r.errInvalidToken("string")
+ return "", nil
+ }
+ if !skipUnescape {
+ if err := r.unescapeStringToken(); err != nil {
+ r.errInvalidToken("string")
+ return "", nil
+ }
+ }
+
+ bytes := r.token.byteValue
+ ret := bytesToStr(r.token.byteValue)
+ r.consume()
+ return ret, bytes
+}
+
+// UnsafeString returns the string value if the token is a string literal.
+//
+// Warning: returned string may point to the input buffer, so the string should not outlive
+// the input buffer. Intended pattern of usage is as an argument to a switch statement.
+func (r *Lexer) UnsafeString() string {
+ ret, _ := r.unsafeString(false)
+ return ret
+}
+
+// UnsafeBytes returns the byte slice if the token is a string literal.
+func (r *Lexer) UnsafeBytes() []byte {
+ _, ret := r.unsafeString(false)
+ return ret
+}
+
+// UnsafeFieldName returns the current member name string token.
+func (r *Lexer) UnsafeFieldName(skipUnescape bool) string {
+ ret, _ := r.unsafeString(skipUnescape)
+ return ret
+}
+
+// String reads a string literal.
+func (r *Lexer) String() string {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenString {
+ r.errInvalidToken("string")
+ return ""
+ }
+ if err := r.unescapeStringToken(); err != nil {
+ r.errInvalidToken("string")
+ return ""
+ }
+ var ret string
+ if r.token.byteValueCloned {
+ ret = bytesToStr(r.token.byteValue)
+ } else {
+ ret = string(r.token.byteValue)
+ }
+ r.consume()
+ return ret
+}
+
+// StringIntern reads a string literal, and performs string interning on it.
+func (r *Lexer) StringIntern() string {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenString {
+ r.errInvalidToken("string")
+ return ""
+ }
+ if err := r.unescapeStringToken(); err != nil {
+ r.errInvalidToken("string")
+ return ""
+ }
+ ret := intern.Bytes(r.token.byteValue)
+ r.consume()
+ return ret
+}
+
+// Bytes reads a string literal and base64 decodes it into a byte slice.
+func (r *Lexer) Bytes() []byte {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenString {
+ r.errInvalidToken("string")
+ return nil
+ }
+ if err := r.unescapeStringToken(); err != nil {
+ r.errInvalidToken("string")
+ return nil
+ }
+ ret := make([]byte, base64.StdEncoding.DecodedLen(len(r.token.byteValue)))
+ n, err := base64.StdEncoding.Decode(ret, r.token.byteValue)
+ if err != nil {
+ r.fatalError = &LexerError{
+ Reason: err.Error(),
+ }
+ return nil
+ }
+
+ r.consume()
+ return ret[:n]
+}
+
+// Bool reads a true or false boolean keyword.
+func (r *Lexer) Bool() bool {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenBool {
+ r.errInvalidToken("bool")
+ return false
+ }
+ ret := r.token.boolValue
+ r.consume()
+ return ret
+}
+
+func (r *Lexer) number() string {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenNumber {
+ r.errInvalidToken("number")
+ return ""
+ }
+ ret := bytesToStr(r.token.byteValue)
+ r.consume()
+ return ret
+}
+
+func (r *Lexer) Uint8() uint8 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 8)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return uint8(n)
+}
+
+func (r *Lexer) Uint16() uint16 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 16)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return uint16(n)
+}
+
+func (r *Lexer) Uint32() uint32 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 32)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return uint32(n)
+}
+
+func (r *Lexer) Uint64() uint64 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 64)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return n
+}
+
+func (r *Lexer) Uint() uint {
+ return uint(r.Uint64())
+}
+
+func (r *Lexer) Int8() int8 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 8)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return int8(n)
+}
+
+func (r *Lexer) Int16() int16 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 16)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return int16(n)
+}
+
+func (r *Lexer) Int32() int32 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 32)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return int32(n)
+}
+
+func (r *Lexer) Int64() int64 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 64)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return n
+}
+
+func (r *Lexer) Int() int {
+ return int(r.Int64())
+}
+
+func (r *Lexer) Uint8Str() uint8 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 8)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return uint8(n)
+}
+
+func (r *Lexer) Uint16Str() uint16 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 16)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return uint16(n)
+}
+
+func (r *Lexer) Uint32Str() uint32 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 32)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return uint32(n)
+}
+
+func (r *Lexer) Uint64Str() uint64 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 64)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return n
+}
+
+func (r *Lexer) UintStr() uint {
+ return uint(r.Uint64Str())
+}
+
+func (r *Lexer) UintptrStr() uintptr {
+ return uintptr(r.Uint64Str())
+}
+
+func (r *Lexer) Int8Str() int8 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 8)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return int8(n)
+}
+
+func (r *Lexer) Int16Str() int16 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 16)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return int16(n)
+}
+
+func (r *Lexer) Int32Str() int32 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 32)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return int32(n)
+}
+
+func (r *Lexer) Int64Str() int64 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 64)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return n
+}
+
+func (r *Lexer) IntStr() int {
+ return int(r.Int64Str())
+}
+
+func (r *Lexer) Float32() float32 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseFloat(s, 32)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return float32(n)
+}
+
+func (r *Lexer) Float32Str() float32 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+ n, err := strconv.ParseFloat(s, 32)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return float32(n)
+}
+
+func (r *Lexer) Float64() float64 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseFloat(s, 64)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return n
+}
+
+func (r *Lexer) Float64Str() float64 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+ n, err := strconv.ParseFloat(s, 64)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return n
+}
+
+func (r *Lexer) Error() error {
+ return r.fatalError
+}
+
+func (r *Lexer) AddError(e error) {
+ if r.fatalError == nil {
+ r.fatalError = e
+ }
+}
+
+func (r *Lexer) AddNonFatalError(e error) {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Data: string(r.Data[r.start:r.pos]),
+ Reason: e.Error(),
+ })
+}
+
+func (r *Lexer) addNonfatalError(err *LexerError) {
+ if r.UseMultipleErrors {
+ // We don't want to add errors with the same offset.
+ if len(r.multipleErrors) != 0 && r.multipleErrors[len(r.multipleErrors)-1].Offset == err.Offset {
+ return
+ }
+ r.multipleErrors = append(r.multipleErrors, err)
+ return
+ }
+ r.fatalError = err
+}
+
+func (r *Lexer) GetNonFatalErrors() []*LexerError {
+ return r.multipleErrors
+}
+
+// JsonNumber fetches a json.Number from the 'encoding/json' package.
+// Number, string, and null tokens are all accepted values.
+func (r *Lexer) JsonNumber() json.Number {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() {
+ r.errInvalidToken("json.Number")
+ return json.Number("")
+ }
+
+ switch r.token.kind {
+ case tokenString:
+ return json.Number(r.String())
+ case tokenNumber:
+ return json.Number(r.Raw())
+ case tokenNull:
+ r.Null()
+ return json.Number("")
+ default:
+ r.errSyntax()
+ return json.Number("")
+ }
+}
+
+// Interface fetches an interface{} value, analogously to the 'encoding/json' package.
+func (r *Lexer) Interface() interface{} {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+
+ if !r.Ok() {
+ return nil
+ }
+ switch r.token.kind {
+ case tokenString:
+ return r.String()
+ case tokenNumber:
+ return r.Float64()
+ case tokenBool:
+ return r.Bool()
+ case tokenNull:
+ r.Null()
+ return nil
+ }
+
+ if r.token.delimValue == '{' {
+ r.consume()
+
+ ret := map[string]interface{}{}
+ for !r.IsDelim('}') {
+ key := r.String()
+ r.WantColon()
+ ret[key] = r.Interface()
+ r.WantComma()
+ }
+ r.Delim('}')
+
+ if r.Ok() {
+ return ret
+ } else {
+ return nil
+ }
+ } else if r.token.delimValue == '[' {
+ r.consume()
+
+ ret := []interface{}{}
+ for !r.IsDelim(']') {
+ ret = append(ret, r.Interface())
+ r.WantComma()
+ }
+ r.Delim(']')
+
+ if r.Ok() {
+ return ret
+ } else {
+ return nil
+ }
+ }
+ r.errSyntax()
+ return nil
+}
+
+// WantComma requires a comma to be present before fetching next token.
+func (r *Lexer) WantComma() {
+ r.wantSep = ','
+ r.firstElement = false
+}
+
+// WantColon requires a colon to be present before fetching next token.
+func (r *Lexer) WantColon() {
+ r.wantSep = ':'
+ r.firstElement = false
+}
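+
+// Usage sketch (illustrative, not part of the original file): decode a small
+// object by driving the lexer directly, the way generated easyjson code does.
+//
+//	in := Lexer{Data: []byte(`{"key":"value"}`)}
+//	in.Delim('{')
+//	for !in.IsDelim('}') {
+//		key := in.UnsafeFieldName(false)
+//		in.WantColon()
+//		val := in.String()
+//		in.WantComma()
+//		_, _ = key, val
+//	}
+//	in.Delim('}')
+//	in.Consumed()
+//	// in.Error() reports any fatal parse error encountered above.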
diff --git a/vendor/github.com/mailru/easyjson/jwriter/writer.go b/vendor/github.com/mailru/easyjson/jwriter/writer.go
new file mode 100644
index 00000000..2c5b2010
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/jwriter/writer.go
@@ -0,0 +1,405 @@
+// Package jwriter contains a JSON writer.
+package jwriter
+
+import (
+ "io"
+ "strconv"
+ "unicode/utf8"
+
+ "github.com/mailru/easyjson/buffer"
+)
+
+// Flags describe various encoding options. The behavior may actually be implemented in the encoder, but
+// the Flags field in Writer is used to set and pass them around.
+type Flags int
+
+const (
+ NilMapAsEmpty Flags = 1 << iota // Encode nil map as '{}' rather than 'null'.
+ NilSliceAsEmpty // Encode nil slice as '[]' rather than 'null'.
+)
+
+// Writer is a JSON writer.
+type Writer struct {
+ Flags Flags
+
+ Error error
+ Buffer buffer.Buffer
+ NoEscapeHTML bool
+}
+
+// Size returns the size of the data that was written out.
+func (w *Writer) Size() int {
+ return w.Buffer.Size()
+}
+
+// DumpTo outputs the data to given io.Writer, resetting the buffer.
+func (w *Writer) DumpTo(out io.Writer) (written int, err error) {
+ return w.Buffer.DumpTo(out)
+}
+
+// BuildBytes returns the writer data as a single byte slice. You can optionally provide one byte
+// slice as an argument for it to try to reuse.
+func (w *Writer) BuildBytes(reuse ...[]byte) ([]byte, error) {
+ if w.Error != nil {
+ return nil, w.Error
+ }
+
+ return w.Buffer.BuildBytes(reuse...), nil
+}
+
+// ReadCloser returns an io.ReadCloser that can be used to read the data.
+// ReadCloser also resets the buffer.
+func (w *Writer) ReadCloser() (io.ReadCloser, error) {
+ if w.Error != nil {
+ return nil, w.Error
+ }
+
+ return w.Buffer.ReadCloser(), nil
+}
+
+// RawByte appends a single raw byte to the buffer.
+func (w *Writer) RawByte(c byte) {
+ w.Buffer.AppendByte(c)
+}
+
+// RawString appends a raw string to the buffer.
+func (w *Writer) RawString(s string) {
+ w.Buffer.AppendString(s)
+}
+
+// Raw appends raw binary data to the buffer, or sets the error if one is given. Useful for
+// calling with the results of MarshalJSON-like functions.
+func (w *Writer) Raw(data []byte, err error) {
+ switch {
+ case w.Error != nil:
+ return
+ case err != nil:
+ w.Error = err
+ case len(data) > 0:
+ w.Buffer.AppendBytes(data)
+ default:
+ w.RawString("null")
+ }
+}
+
+// RawText encloses raw binary data in quotes and appends it to the buffer.
+// Useful for calling with the results of MarshalText-like functions.
+func (w *Writer) RawText(data []byte, err error) {
+ switch {
+ case w.Error != nil:
+ return
+ case err != nil:
+ w.Error = err
+ case len(data) > 0:
+ w.String(string(data))
+ default:
+ w.RawString("null")
+ }
+}
+
+// Base64Bytes appends data to the buffer after base64-encoding it.
+func (w *Writer) Base64Bytes(data []byte) {
+ if data == nil {
+ w.Buffer.AppendString("null")
+ return
+ }
+ w.Buffer.AppendByte('"')
+ w.base64(data)
+ w.Buffer.AppendByte('"')
+}
+
+func (w *Writer) Uint8(n uint8) {
+ w.Buffer.EnsureSpace(3)
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+}
+
+func (w *Writer) Uint16(n uint16) {
+ w.Buffer.EnsureSpace(5)
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+}
+
+func (w *Writer) Uint32(n uint32) {
+ w.Buffer.EnsureSpace(10)
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+}
+
+func (w *Writer) Uint(n uint) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+}
+
+func (w *Writer) Uint64(n uint64) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, n, 10)
+}
+
+func (w *Writer) Int8(n int8) {
+ w.Buffer.EnsureSpace(4)
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+}
+
+func (w *Writer) Int16(n int16) {
+ w.Buffer.EnsureSpace(6)
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+}
+
+func (w *Writer) Int32(n int32) {
+ w.Buffer.EnsureSpace(11)
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+}
+
+func (w *Writer) Int(n int) {
+ w.Buffer.EnsureSpace(21)
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+}
+
+func (w *Writer) Int64(n int64) {
+ w.Buffer.EnsureSpace(21)
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, n, 10)
+}
+
+func (w *Writer) Uint8Str(n uint8) {
+ w.Buffer.EnsureSpace(3)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Uint16Str(n uint16) {
+ w.Buffer.EnsureSpace(5)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Uint32Str(n uint32) {
+ w.Buffer.EnsureSpace(10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) UintStr(n uint) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Uint64Str(n uint64) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, n, 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) UintptrStr(n uintptr) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Int8Str(n int8) {
+ w.Buffer.EnsureSpace(4)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Int16Str(n int16) {
+ w.Buffer.EnsureSpace(6)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Int32Str(n int32) {
+ w.Buffer.EnsureSpace(11)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) IntStr(n int) {
+ w.Buffer.EnsureSpace(21)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Int64Str(n int64) {
+ w.Buffer.EnsureSpace(21)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, n, 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Float32(n float32) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = strconv.AppendFloat(w.Buffer.Buf, float64(n), 'g', -1, 32)
+}
+
+func (w *Writer) Float32Str(n float32) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendFloat(w.Buffer.Buf, float64(n), 'g', -1, 32)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Float64(n float64) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = strconv.AppendFloat(w.Buffer.Buf, n, 'g', -1, 64)
+}
+
+func (w *Writer) Float64Str(n float64) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendFloat(w.Buffer.Buf, float64(n), 'g', -1, 64)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Bool(v bool) {
+ w.Buffer.EnsureSpace(5)
+ if v {
+ w.Buffer.Buf = append(w.Buffer.Buf, "true"...)
+ } else {
+ w.Buffer.Buf = append(w.Buffer.Buf, "false"...)
+ }
+}
+
+const chars = "0123456789abcdef"
+
+func getTable(falseValues ...int) [128]bool {
+ table := [128]bool{}
+
+ for i := 0; i < 128; i++ {
+ table[i] = true
+ }
+
+ for _, v := range falseValues {
+ table[v] = false
+ }
+
+ return table
+}
+
+var (
+ htmlEscapeTable = getTable(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, '"', '&', '<', '>', '\\')
+ htmlNoEscapeTable = getTable(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, '"', '\\')
+)
+
+func (w *Writer) String(s string) {
+ w.Buffer.AppendByte('"')
+
+ // Portions of the string that contain no escapes are appended as
+ // byte slices.
+
+ p := 0 // last non-escape symbol
+
+ escapeTable := &htmlEscapeTable
+ if w.NoEscapeHTML {
+ escapeTable = &htmlNoEscapeTable
+ }
+
+ for i := 0; i < len(s); {
+ c := s[i]
+
+ if c < utf8.RuneSelf {
+ if escapeTable[c] {
+ // single-width character, no escaping is required
+ i++
+ continue
+ }
+
+ w.Buffer.AppendString(s[p:i])
+ switch c {
+ case '\t':
+ w.Buffer.AppendString(`\t`)
+ case '\r':
+ w.Buffer.AppendString(`\r`)
+ case '\n':
+ w.Buffer.AppendString(`\n`)
+ case '\\':
+ w.Buffer.AppendString(`\\`)
+ case '"':
+ w.Buffer.AppendString(`\"`)
+ default:
+ w.Buffer.AppendString(`\u00`)
+ w.Buffer.AppendByte(chars[c>>4])
+ w.Buffer.AppendByte(chars[c&0xf])
+ }
+
+ i++
+ p = i
+ continue
+ }
+
+ // broken UTF-8 sequence
+ runeValue, runeWidth := utf8.DecodeRuneInString(s[i:])
+ if runeValue == utf8.RuneError && runeWidth == 1 {
+ w.Buffer.AppendString(s[p:i])
+ w.Buffer.AppendString(`\ufffd`)
+ i++
+ p = i
+ continue
+ }
+
+ // JSONP-unsafe characters: line separator (U+2028) and paragraph separator (U+2029)
+ if runeValue == '\u2028' || runeValue == '\u2029' {
+ w.Buffer.AppendString(s[p:i])
+ w.Buffer.AppendString(`\u202`)
+ w.Buffer.AppendByte(chars[runeValue&0xf])
+ i += runeWidth
+ p = i
+ continue
+ }
+ i += runeWidth
+ }
+ w.Buffer.AppendString(s[p:])
+ w.Buffer.AppendByte('"')
+}
+
+const encode = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
+const padChar = '='
+
+func (w *Writer) base64(in []byte) {
+
+ if len(in) == 0 {
+ return
+ }
+
+ w.Buffer.EnsureSpace(((len(in)-1)/3 + 1) * 4)
+
+ si := 0
+ n := (len(in) / 3) * 3
+
+ for si < n {
+ // Convert 3x 8bit source bytes into 4 bytes
+ val := uint(in[si+0])<<16 | uint(in[si+1])<<8 | uint(in[si+2])
+
+ w.Buffer.Buf = append(w.Buffer.Buf, encode[val>>18&0x3F], encode[val>>12&0x3F], encode[val>>6&0x3F], encode[val&0x3F])
+
+ si += 3
+ }
+
+ remain := len(in) - si
+ if remain == 0 {
+ return
+ }
+
+ // Add the remaining small block
+ val := uint(in[si+0]) << 16
+ if remain == 2 {
+ val |= uint(in[si+1]) << 8
+ }
+
+ w.Buffer.Buf = append(w.Buffer.Buf, encode[val>>18&0x3F], encode[val>>12&0x3F])
+
+ switch remain {
+ case 2:
+ w.Buffer.Buf = append(w.Buffer.Buf, encode[val>>6&0x3F], byte(padChar))
+ case 1:
+ w.Buffer.Buf = append(w.Buffer.Buf, byte(padChar), byte(padChar))
+ }
+}
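+
+// Usage sketch (illustrative, not part of the original file): encode a small
+// object by hand, mirroring what generated easyjson encoders emit.
+//
+//	var out Writer
+//	out.RawByte('{')
+//	out.String("key")
+//	out.RawByte(':')
+//	out.String("value")
+//	out.RawByte('}')
+//	data, err := out.BuildBytes() // data == []byte(`{"key":"value"}`)
+//	_, _ = data, err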
diff --git a/vendor/github.com/mohae/deepcopy/.gitignore b/vendor/github.com/mohae/deepcopy/.gitignore
new file mode 100644
index 00000000..5846dd15
--- /dev/null
+++ b/vendor/github.com/mohae/deepcopy/.gitignore
@@ -0,0 +1,26 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*~
+*.out
+*.log
diff --git a/vendor/github.com/mohae/deepcopy/.travis.yml b/vendor/github.com/mohae/deepcopy/.travis.yml
new file mode 100644
index 00000000..fd47a8cf
--- /dev/null
+++ b/vendor/github.com/mohae/deepcopy/.travis.yml
@@ -0,0 +1,11 @@
+language: go
+
+go:
+ - tip
+
+matrix:
+ allow_failures:
+ - go: tip
+
+script:
+ - go test ./...
diff --git a/vendor/github.com/mohae/deepcopy/LICENSE b/vendor/github.com/mohae/deepcopy/LICENSE
new file mode 100644
index 00000000..419673f0
--- /dev/null
+++ b/vendor/github.com/mohae/deepcopy/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Joel
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/mohae/deepcopy/README.md b/vendor/github.com/mohae/deepcopy/README.md
new file mode 100644
index 00000000..f8184188
--- /dev/null
+++ b/vendor/github.com/mohae/deepcopy/README.md
@@ -0,0 +1,8 @@
+deepCopy
+========
+[![GoDoc](https://godoc.org/github.com/mohae/deepcopy?status.svg)](https://godoc.org/github.com/mohae/deepcopy)[![Build Status](https://travis-ci.org/mohae/deepcopy.png)](https://travis-ci.org/mohae/deepcopy)
+
+DeepCopy makes deep copies of things: unexported field values are not copied.
+
+## Usage
+ cpy := deepcopy.Copy(orig)
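+
+For a slightly fuller sketch (illustrative; `Copy` returns an `interface{}`, so assert it back to the original type):
+
+    orig := map[string][]int{"a": {1, 2, 3}}
+    cpy := deepcopy.Copy(orig).(map[string][]int)
+    cpy["a"][0] = 99 // orig["a"][0] is still 1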
diff --git a/vendor/github.com/mohae/deepcopy/deepcopy.go b/vendor/github.com/mohae/deepcopy/deepcopy.go
new file mode 100644
index 00000000..ba763ad0
--- /dev/null
+++ b/vendor/github.com/mohae/deepcopy/deepcopy.go
@@ -0,0 +1,125 @@
+// deepcopy makes deep copies of things. A standard copy will copy the
+// pointers: deep copy copies the values pointed to. Unexported field
+// values are not copied.
+//
+// Copyright (c)2014-2016, Joel Scoble (github.com/mohae), all rights reserved.
+// License: MIT, for more details check the included LICENSE file.
+package deepcopy
+
+import (
+ "reflect"
+ "time"
+)
+
+// Interface for delegating copy process to type
+type Interface interface {
+ DeepCopy() interface{}
+}
+
+// Iface is an alias to Copy; this exists for backwards compatibility reasons.
+func Iface(iface interface{}) interface{} {
+ return Copy(iface)
+}
+
+// Copy creates a deep copy of whatever is passed to it and returns the copy
+// in an interface{}. The returned value will need to be asserted to the
+// correct type.
+func Copy(src interface{}) interface{} {
+ if src == nil {
+ return nil
+ }
+
+ // Make the interface a reflect.Value
+ original := reflect.ValueOf(src)
+
+ // Make a copy of the same type as the original.
+ cpy := reflect.New(original.Type()).Elem()
+
+ // Recursively copy the original.
+ copyRecursive(original, cpy)
+
+ // Return the copy as an interface.
+ return cpy.Interface()
+}
+
+// copyRecursive does the actual copying of the interface. It currently has
+// limited support for what it can handle. Add as needed.
+func copyRecursive(original, cpy reflect.Value) {
+ // Check whether the value implements deepcopy.Interface.
+ if original.CanInterface() {
+ if copier, ok := original.Interface().(Interface); ok {
+ cpy.Set(reflect.ValueOf(copier.DeepCopy()))
+ return
+ }
+ }
+
+ // handle according to original's Kind
+ switch original.Kind() {
+ case reflect.Ptr:
+ // Get the actual value being pointed to.
+ originalValue := original.Elem()
+
+ // if it isn't valid, return.
+ if !originalValue.IsValid() {
+ return
+ }
+ cpy.Set(reflect.New(originalValue.Type()))
+ copyRecursive(originalValue, cpy.Elem())
+
+ case reflect.Interface:
+ // If this is a nil, don't do anything
+ if original.IsNil() {
+ return
+ }
+ // Get the value for the interface, not the pointer.
+ originalValue := original.Elem()
+
+ // Get the value by calling Elem().
+ copyValue := reflect.New(originalValue.Type()).Elem()
+ copyRecursive(originalValue, copyValue)
+ cpy.Set(copyValue)
+
+ case reflect.Struct:
+ t, ok := original.Interface().(time.Time)
+ if ok {
+ cpy.Set(reflect.ValueOf(t))
+ return
+ }
+ // Go through each field of the struct and copy it.
+ for i := 0; i < original.NumField(); i++ {
+ // The Type's StructField for a given field is checked to see if StructField.PkgPath
+ // is set to determine if the field is exported or not because CanSet() returns false
+ // for settable fields. I'm not sure why. -mohae
+ if original.Type().Field(i).PkgPath != "" {
+ continue
+ }
+ copyRecursive(original.Field(i), cpy.Field(i))
+ }
+
+ case reflect.Slice:
+ if original.IsNil() {
+ return
+ }
+ // Make a new slice and copy each element.
+ cpy.Set(reflect.MakeSlice(original.Type(), original.Len(), original.Cap()))
+ for i := 0; i < original.Len(); i++ {
+ copyRecursive(original.Index(i), cpy.Index(i))
+ }
+
+ case reflect.Map:
+ if original.IsNil() {
+ return
+ }
+ cpy.Set(reflect.MakeMap(original.Type()))
+ for _, key := range original.MapKeys() {
+ originalValue := original.MapIndex(key)
+ copyValue := reflect.New(originalValue.Type()).Elem()
+ copyRecursive(originalValue, copyValue)
+ copyKey := Copy(key.Interface())
+ cpy.SetMapIndex(reflect.ValueOf(copyKey), copyValue)
+ }
+
+ default:
+ cpy.Set(original)
+ }
+}
diff --git a/vendor/github.com/perimeterx/marshmallow/.gitignore b/vendor/github.com/perimeterx/marshmallow/.gitignore
new file mode 100644
index 00000000..cf53c0a1
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/.gitignore
@@ -0,0 +1,4 @@
+/.idea
+
+coverage.out
+profile.out
diff --git a/vendor/github.com/perimeterx/marshmallow/CHANGELOG.md b/vendor/github.com/perimeterx/marshmallow/CHANGELOG.md
new file mode 100644
index 00000000..92937d05
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/CHANGELOG.md
@@ -0,0 +1,49 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [[1.1.5](https://github.com/PerimeterX/marshmallow/compare/v1.1.4...v1.1.5)] - 2023-07-03
+
+### Added
+
+- Support for reporting errors from `HandleJSONData` - [info](https://github.com/PerimeterX/marshmallow/issues/27).
+
+## [[1.1.4](https://github.com/PerimeterX/marshmallow/compare/v1.1.3...v1.1.4)] - 2022-11-10
+
+### Fixed
+
+- Fixed problem with nested object implementing JSONDataHandler with skipPopulateStruct - [info](https://github.com/PerimeterX/marshmallow/issues/18).
+- Fixed problem with nested object implementing JSONDataHandler with skipPopulateStruct in ModeFailOverToOriginalValue - [info](https://github.com/PerimeterX/marshmallow/issues/19).
+
+## [[1.1.3](https://github.com/PerimeterX/marshmallow/compare/v1.1.2...v1.1.3)] - 2022-08-31
+
+### Added
+
+- Support for excluding known fields from the result map - [info](https://github.com/PerimeterX/marshmallow/issues/16).
+
+## [[1.1.2](https://github.com/PerimeterX/marshmallow/compare/v1.1.1...v1.1.2)] - 2022-08-23
+
+### Added
+
+- Support capturing nested unknown fields - [info](https://github.com/PerimeterX/marshmallow/issues/15).
+
+## [[1.1.1](https://github.com/PerimeterX/marshmallow/compare/v1.1.0...v1.1.1)] - 2022-08-21
+
+### Fixed
+
+- Fix parsing bug for unknown nested fields - [info](https://github.com/PerimeterX/marshmallow/issues/12).
+
+## [[1.1.0](https://github.com/PerimeterX/marshmallow/compare/v0.0.1...v1.1.0)] - 2022-07-10
+
+### Fixed
+
+- Fixed an issue with embedded fields - [info](https://github.com/PerimeterX/marshmallow/issues/9).
+
+## [[0.0.1](https://github.com/PerimeterX/marshmallow/tree/v0.0.1)] - 2022-04-21
+
+### Added
+
+- All functionality from our internal repository, after it has been stabilized in production for several months - [info](https://www.perimeterx.com/tech-blog/2022/boosting-up-json-performance-of-unstructured-structs-in-go/).
diff --git a/vendor/github.com/perimeterx/marshmallow/CODE_OF_CONDUCT.md b/vendor/github.com/perimeterx/marshmallow/CODE_OF_CONDUCT.md
new file mode 100644
index 00000000..0f6c45e7
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/CODE_OF_CONDUCT.md
@@ -0,0 +1,133 @@
+
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our
+community a harassment-free experience for everyone, regardless of age, body
+size, visible or invisible disability, ethnicity, sex characteristics, gender
+identity and expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, caste, color, religion, or sexual
+identity and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming,
+diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our
+community include:
+
+* Demonstrating empathy and kindness toward other people
+* Being respectful of differing opinions, viewpoints, and experiences
+* Giving and gracefully accepting constructive feedback
+* Accepting responsibility and apologizing to those affected by our mistakes,
+ and learning from the experience
+* Focusing on what is best not just for us as individuals, but for the overall
+ community
+
+Examples of unacceptable behavior include:
+
+* The use of sexualized language or imagery, and sexual attention or advances of
+ any kind
+* Trolling, insulting or derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or email address,
+ without their explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of
+acceptable behavior and will take appropriate and fair corrective action in
+response to any behavior that they deem inappropriate, threatening, offensive,
+or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, and will communicate reasons for moderation
+decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces, and also applies when
+an individual is officially representing the community in public spaces.
+Examples of representing our community include using an official e-mail address,
+posting via an official social media account, or acting as an appointed
+representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported to the community leaders responsible for enforcement at
+[opensource-conduct@humansecurity.com](mailto:opensource-conduct@humansecurity.com).
+All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the
+reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining
+the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed
+unprofessional or unwelcome in the community.
+
+**Consequence**: A private, written warning from community leaders, providing
+clarity around the nature of the violation and an explanation of why the
+behavior was inappropriate. A public apology may be requested.
+
+### 2. Warning
+
+**Community Impact**: A violation through a single incident or series of
+actions.
+
+**Consequence**: A warning with consequences for continued behavior. No
+interaction with the people involved, including unsolicited interaction with
+those enforcing the Code of Conduct, for a specified period of time. This
+includes avoiding interactions in community spaces as well as external channels
+like social media. Violating these terms may lead to a temporary or permanent
+ban.
+
+### 3. Temporary Ban
+
+**Community Impact**: A serious violation of community standards, including
+sustained inappropriate behavior.
+
+**Consequence**: A temporary ban from any sort of interaction or public
+communication with the community for a specified period of time. No public or
+private interaction with the people involved, including unsolicited interaction
+with those enforcing the Code of Conduct, is allowed during this period.
+Violating these terms may lead to a permanent ban.
+
+### 4. Permanent Ban
+
+**Community Impact**: Demonstrating a pattern of violation of community
+standards, including sustained inappropriate behavior, harassment of an
+individual, or aggression toward or disparagement of classes of individuals.
+
+**Consequence**: A permanent ban from any sort of public interaction within the
+community.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 2.1, available at
+[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
+
+Community Impact Guidelines were inspired by
+[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
+
+For answers to common questions about this code of conduct, see the FAQ at
+[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
+[https://www.contributor-covenant.org/translations][translations].
+
+[homepage]: https://www.contributor-covenant.org
+[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
+[Mozilla CoC]: https://github.com/mozilla/diversity
+[FAQ]: https://www.contributor-covenant.org/faq
+[translations]: https://www.contributor-covenant.org/translations
diff --git a/vendor/github.com/perimeterx/marshmallow/CONTRIBUTING.md b/vendor/github.com/perimeterx/marshmallow/CONTRIBUTING.md
new file mode 100644
index 00000000..a265c9ab
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/CONTRIBUTING.md
@@ -0,0 +1,47 @@
+# How To Contribute
+
+We'd love to accept your patches and contributions to this project. There are just a few guidelines you need to follow, which are described in detail below.
+
+## 1. Fork this repo
+
+You should create a fork of this project in your account and work from there. You can create a fork by clicking the fork button in GitHub.
+
+## 2. One feature, one branch
+
+Work for each new feature/issue should occur in its own branch. To create a new branch from the command line:
+```shell
+git checkout -b my-new-feature
+```
+where "my-new-feature" describes what you're working on.
+
+## 3. Add unit tests
+If your contribution modifies existing code or adds new code, please add corresponding unit tests.
+
+## 4. Ensure that the build passes
+
+Run
+```shell
+go test -v
+```
+and check that there are no errors.
+
+## 5. Add documentation for new or updated functionality
+
+Please review the [README.md](README.md) file in this project to see if it is impacted by your change and update it accordingly.
+
+## 6. Add to CHANGELOG.md
+
+Any notable changes should be recorded in the CHANGELOG.md following the [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) conventions.
+
+## 7. Submit a pull request and describe the change
+
+Push your changes to your branch and open a pull request against the parent repo on GitHub. The project administrators will review your pull request and respond with feedback.
+
+# How your contribution gets merged
+
+Upon pull request submission, your code will be reviewed by the maintainers. They will confirm at least the following:
+
+- Tests run successfully (unit, coverage, style).
+- Contribution policy has been followed.
+
+A (human) reviewer will need to sign off on your pull request before it can be merged.
diff --git a/vendor/github.com/perimeterx/marshmallow/LICENSE b/vendor/github.com/perimeterx/marshmallow/LICENSE
new file mode 100644
index 00000000..8ffe8691
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 PerimeterX
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/perimeterx/marshmallow/README.md b/vendor/github.com/perimeterx/marshmallow/README.md
new file mode 100644
index 00000000..bfa90363
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/README.md
@@ -0,0 +1,205 @@
+# Marshmallow
+
+![Marshmallow Campfire](https://raw.githubusercontent.com/PerimeterX/marshmallow/assets/campfire.png)
+
+[![CodeQL Status](https://img.shields.io/github/actions/workflow/status/perimeterx/marshmallow/codeql.yml?branch=main&logo=github&label=CodeQL)](https://github.com/PerimeterX/marshmallow/actions/workflows/codeql.yml?query=branch%3Amain++)
+[![Run Tests](https://img.shields.io/github/actions/workflow/status/perimeterx/marshmallow/go.yml?branch=main&logo=github&label=Run%20Tests)](https://github.com/PerimeterX/marshmallow/actions/workflows/go.yml?query=branch%3Amain)
+[![Dependency Review](https://img.shields.io/github/actions/workflow/status/perimeterx/marshmallow/dependency-review.yml?logo=github&label=Dependency%20Review)](https://github.com/PerimeterX/marshmallow/actions/workflows/dependency-review.yml?query=branch%3Amain)
+[![Go Report Card](https://goreportcard.com/badge/github.com/perimeterx/marshmallow)](https://goreportcard.com/report/github.com/perimeterx/marshmallow)
+![Manual Code Coverage](https://img.shields.io/badge/coverage-92.6%25-green)
+[![Go Reference](https://pkg.go.dev/badge/github.com/perimeterx/marshmallow.svg)](https://pkg.go.dev/github.com/perimeterx/marshmallow)
+[![Licence](https://img.shields.io/github/license/perimeterx/marshmallow)](LICENSE)
+[![Latest Release](https://img.shields.io/github/v/release/perimeterx/marshmallow)](https://github.com/PerimeterX/marshmallow/releases)
+![Top Languages](https://img.shields.io/github/languages/top/perimeterx/marshmallow)
+[![Issues](https://img.shields.io/github/issues-closed/perimeterx/marshmallow?color=%238250df&logo=github)](https://github.com/PerimeterX/marshmallow/issues)
+[![Pull Requests](https://img.shields.io/github/issues-pr-closed-raw/perimeterx/marshmallow?color=%238250df&label=merged%20pull%20requests&logo=github)](https://github.com/PerimeterX/marshmallow/pulls)
+[![Commits](https://img.shields.io/github/last-commit/perimeterx/marshmallow)](https://github.com/PerimeterX/marshmallow/commits/main)
+[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg)](CODE_OF_CONDUCT.md)
+
+
+
+The marshmallow package provides a simple API to perform flexible and performant JSON unmarshalling in Go.
+
+Marshmallow specializes in dealing with **partially known structs** - when some fields are known and some aren't -
+with zero performance overhead and no extra coding needed.
+While unmarshalling, marshmallow fully retains the original data and allows accessing
+it via a typed struct and a dynamic map.
+
+## Contents
+
+- [Install](#install)
+- [Usage](#usage)
+- [Performance Benchmark And Alternatives](#performance-benchmark-and-alternatives)
+- [When Should I Use Marshmallow](#when-should-i-use-marshmallow)
+- [API](#api)
+
+## Install
+
+```sh
+go get -u github.com/perimeterx/marshmallow
+```
+
+## Usage
+
+```go
+package main
+
+import (
+ "fmt"
+ "github.com/perimeterx/marshmallow"
+)
+
+func main() {
+ v := struct {
+ Foo string `json:"foo"`
+ Boo []int `json:"boo"`
+ }{}
+ result, err := marshmallow.Unmarshal([]byte(`{"foo":"bar","boo":[1,2,3],"goo":12.6}`), &v)
+ fmt.Printf("v=%+v, result=%+v, err=%v", v, result, err)
+	// Output: v={Foo:bar Boo:[1 2 3]}, result=map[boo:[1 2 3] foo:bar goo:12.6], err=<nil>
+}
+```
+
+**Examples can be found [here](example_test.go)**
+
+## Performance Benchmark And Alternatives
+
+Marshmallow performs best when dealing with mixed data - when some fields are known and some are unknown.
+More info [below](#when-should-i-use-marshmallow).
+Other solutions are available for this kind of use case; each is explained and documented in the benchmark linked below.
+The full benchmark test can be found
+[here](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go).
+
+|Benchmark|Iterations|Time/Iteration|Bytes Allocated|Allocations|
+|--|--|--|--|--|
+|[unmarshall twice](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L40)|228693|5164 ns/op|1640 B/op|51 allocs/op|
+|[raw map](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L66)|232236|5116 ns/op|2296 B/op|53 allocs/op|
+|[go codec](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L121)|388442|3077 ns/op|2512 B/op|37 allocs/op|
+|[marshmallow](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L16)|626168|1853 ns/op|608 B/op|18 allocs/op|
+|[marshmallow without populating struct](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L162)|678616|1751 ns/op|608 B/op|18 allocs/op|
+
+![marshmallow performance comparison](https://raw.githubusercontent.com/PerimeterX/marshmallow/e45088ca20d4ea5be4143d418d12da63a68d6dfd/performance-chart.svg)
+
+**Marshmallow provides the best performance (up to 3x faster) while not requiring any extra coding.**
+In fact, marshmallow performs as fast as a normal `json.Unmarshal` call; however, such a call loses the data for all
+the fields that did not match the given struct. With marshmallow you never lose any data.
+
+|Benchmark|Iterations|Time/Iteration|Bytes Allocated|Allocations|
+|--|--|--|--|--|
+|[marshmallow](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L16)|626168|1853 ns/op|608 B/op|18 allocs/op|
+|[native library](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L143)|652106|1845 ns/op|304 B/op|11 allocs/op|
+|[marshmallow without populating struct](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L162)|678616|1751 ns/op|608 B/op|18 allocs/op|
+
+## When Should I Use Marshmallow
+
+Marshmallow is best suited for use cases where you are interested in all the input data, but you have predetermined
+information only about a subset of it. For instance, suppose you plan to reference two specific fields from the data,
+then iterate over all of it and apply some generic logic. Here is how that looks with the standard library:
+
+```go
+func isAllowedToDrive(data []byte) (bool, error) {
+ result := make(map[string]interface{})
+ err := json.Unmarshal(data, &result)
+ if err != nil {
+ return false, err
+ }
+
+ age, ok := result["age"]
+ if !ok {
+ return false, nil
+ }
+ a, ok := age.(float64)
+ if !ok {
+ return false, nil
+ }
+ if a < 17 {
+ return false, nil
+ }
+
+ hasDriversLicense, ok := result["has_drivers_license"]
+ if !ok {
+ return false, nil
+ }
+ h, ok := hasDriversLicense.(bool)
+ if !ok {
+ return false, nil
+ }
+ if !h {
+ return false, nil
+ }
+
+ for key := range result {
+ if strings.Contains(key, "prior_conviction") {
+ return false, nil
+ }
+ }
+
+ return true, nil
+}
+```
+
+And with marshmallow:
+
+```go
+func isAllowedToDrive(data []byte) (bool, error) {
+ v := struct {
+ Age int `json:"age"`
+ HasDriversLicense bool `json:"has_drivers_license"`
+ }{}
+ result, err := marshmallow.Unmarshal(data, &v)
+ if err != nil {
+ return false, err
+ }
+
+ if v.Age < 17 || !v.HasDriversLicense {
+ return false, nil
+ }
+
+ for key := range result {
+ if strings.Contains(key, "prior_conviction") {
+ return false, nil
+ }
+ }
+
+ return true, nil
+}
+```
+
+## API
+
+Marshmallow exposes two main API functions -
+[Unmarshal](https://github.com/PerimeterX/marshmallow/blob/0e0218ab860be8a4b5f57f5ff239f281c250c5da/unmarshal.go#L27)
+and
+[UnmarshalFromJSONMap](https://github.com/PerimeterX/marshmallow/blob/0e0218ab860be8a4b5f57f5ff239f281c250c5da/unmarshal_from_json_map.go#L37).
+While unmarshalling, marshmallow supports the following options:
+
+* Setting the mode for handling invalid data using the [WithMode](https://github.com/PerimeterX/marshmallow/blob/0e0218ab860be8a4b5f57f5ff239f281c250c5da/options.go#L30) function.
+* Excluding known fields from the result map using the [WithExcludeKnownFieldsFromMap](https://github.com/PerimeterX/marshmallow/blob/457669ae9973895584f2636eabfc104140d3b700/options.go#L50) function.
+* Skipping struct population to boost performance using the [WithSkipPopulateStruct](https://github.com/PerimeterX/marshmallow/blob/0e0218ab860be8a4b5f57f5ff239f281c250c5da/options.go#L41) function.
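+
+As a minimal, illustrative sketch (the `Foo` and `extra` names below are made up), these options can be combined in a single `Unmarshal` call:
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/perimeterx/marshmallow"
+)
+
+func main() {
+	v := struct {
+		Foo string `json:"foo"`
+	}{}
+	result, err := marshmallow.Unmarshal(
+		[]byte(`{"foo":"bar","extra":12.6}`),
+		&v,
+		marshmallow.WithMode(marshmallow.ModeAllowMultipleErrors),
+		marshmallow.WithExcludeKnownFieldsFromMap(true),
+	)
+	// Known fields such as "foo" are excluded from the result map, and decode
+	// errors are collected and returned together instead of failing on the first one.
+	fmt.Println(v.Foo, result, err)
+}
+```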
+
+In order to capture unknown nested fields, structs must implement [JSONDataErrorHandler](https://github.com/PerimeterX/marshmallow/blob/195c994aa6e3e0852601ad9cf65bcddef0dd7479/options.go#L76).
+More info [here](https://github.com/PerimeterX/marshmallow/issues/15).
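+
+As a minimal, illustrative sketch (the `Parent` and `Child` types below are hypothetical), a nested type can capture its own fields like this:
+
+```go
+type Child struct {
+	Name    string                 `json:"name"`
+	Unknown map[string]interface{} `json:"-"`
+}
+
+// HandleJSONData receives the nested JSON object's fields (by default both known and unknown).
+func (c *Child) HandleJSONData(data map[string]interface{}) error {
+	c.Unknown = data
+	return nil
+}
+
+type Parent struct {
+	Child *Child `json:"child"`
+}
+```
+
+Unmarshalling a parent object then populates `Child.Name` as usual, while `Child.Unknown` retains every nested field.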
+
+Marshmallow also supports caching of reflection information using
+[EnableCache](https://github.com/PerimeterX/marshmallow/blob/d3500aa5b0f330942b178b155da933c035dd3906/cache.go#L40)
+and
+[EnableCustomCache](https://github.com/PerimeterX/marshmallow/blob/d3500aa5b0f330942b178b155da933c035dd3906/cache.go#L35).
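+
+For example, a minimal sketch of enabling the default cache once at startup (it is not safe to enable it concurrently with unmarshalling):
+
+```go
+func init() {
+	// Enable the default reflection cache once, before any unmarshalling runs.
+	marshmallow.EnableCache()
+}
+```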
+
+## Contact and Contribute
+
+Reporting issues and requesting features may be done in our [GitHub issues page](https://github.com/PerimeterX/marshmallow/issues).
+Discussions may be conducted in our [GitHub discussions page](https://github.com/PerimeterX/marshmallow/discussions).
+For any further questions or comments, you can reach out to us at [open-source@humansecurity.com](mailto:open-source@humansecurity.com).
+
+Any type of contribution is warmly welcome and appreciated ❤️
+Please read our [contribution](CONTRIBUTING.md) guide for more info.
+
+If you're looking for something to get started with, you can always follow our [issues page](https://github.com/PerimeterX/marshmallow/issues) and look for
+[good first issue](https://github.com/PerimeterX/marshmallow/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) and
+[help wanted](https://github.com/PerimeterX/marshmallow/issues?q=is%3Aissue+label%3A%22help+wanted%22+is%3Aopen) labels.
+
+## Marshmallow Logo
+
+Marshmallow logo and assets by [Adva Rom](https://www.linkedin.com/in/adva-rom-7a6738127/) are licensed under a Creative Commons Attribution 4.0 International License.
+
+![Marshmallow Logo](https://raw.githubusercontent.com/PerimeterX/marshmallow/assets/marshmallow.png)
diff --git a/vendor/github.com/perimeterx/marshmallow/cache.go b/vendor/github.com/perimeterx/marshmallow/cache.go
new file mode 100644
index 00000000..a67cea6d
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/cache.go
@@ -0,0 +1,63 @@
+// Copyright 2022 PerimeterX. All rights reserved.
+// Use of this source code is governed by a MIT style
+// license that can be found in the LICENSE file.
+
+package marshmallow
+
+import (
+ "reflect"
+ "sync"
+)
+
+// Cache allows unmarshalling to use a cached version of reflection information about types.
+// Cache interface follows the implementation of sync.Map, but you may wrap any cache implementation
+// to match it. This allows you to control max cache size, eviction policies and any other caching aspect.
+type Cache interface {
+ // Load returns the value stored in the map for a key, or nil if no value is present.
+ // The ok result indicates whether value was found in the map.
+ Load(key interface{}) (interface{}, bool)
+ // Store sets the value for a key.
+ Store(key, value interface{})
+}
+
+// EnableCustomCache enables the unmarshalling cache. It allows reuse of reflection information about types needed
+// to perform the unmarshalling. Using such a cache can speed up unmarshalling by about 1.4x.
+// Check out benchmark_test.go for an example.
+//
+// EnableCustomCache is not thread safe! Do not use it while performing unmarshalling, or it will
+// cause an unsafe race condition. Typically, EnableCustomCache should be called once when the process boots.
+//
+// Caching is disabled by default. The use of this function allows enabling it and controlling the
+// behavior of the cache. Typically, the use of sync.Map should be good enough. The caching mechanism
+// stores a single map per struct type. If you plan to unmarshal a huge number of distinct
+// struct types, it may consume a lot of resources, in which case you can choose
+// the caching implementation you like and its setup.
+func EnableCustomCache(c Cache) {
+ cache = c
+}
+
+// EnableCache enables unmarshalling cache with default implementation. More info at EnableCustomCache.
+func EnableCache() {
+ EnableCustomCache(&sync.Map{})
+}
+
+var cache Cache
+
+func cacheLookup(t reflect.Type) map[string]reflectionInfo {
+ if cache == nil {
+ return nil
+ }
+ value, exists := cache.Load(t)
+ if !exists {
+ return nil
+ }
+ result, _ := value.(map[string]reflectionInfo)
+ return result
+}
+
+func cacheStore(t reflect.Type, fields map[string]reflectionInfo) {
+ if cache == nil {
+ return
+ }
+ cache.Store(t, fields)
+}
diff --git a/vendor/github.com/perimeterx/marshmallow/doc.go b/vendor/github.com/perimeterx/marshmallow/doc.go
new file mode 100644
index 00000000..c179e657
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/doc.go
@@ -0,0 +1,10 @@
+/*
+Package marshmallow provides a simple API to perform flexible and performant JSON unmarshalling.
+Unlike other packages, marshmallow supports unmarshalling of some known and some unknown fields
+with zero performance overhead and no extra coding needed. While unmarshalling,
+marshmallow fully retains the original data and allows accessing it via a typed struct and a
+dynamic map.
+
+https://github.com/perimeterx/marshmallow
+*/
+package marshmallow
diff --git a/vendor/github.com/perimeterx/marshmallow/errors.go b/vendor/github.com/perimeterx/marshmallow/errors.go
new file mode 100644
index 00000000..c4d341cc
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/errors.go
@@ -0,0 +1,101 @@
+// Copyright 2022 PerimeterX. All rights reserved.
+// Use of this source code is governed by a MIT style
+// license that can be found in the LICENSE file.
+
+package marshmallow
+
+import (
+ "errors"
+ "fmt"
+ "github.com/mailru/easyjson/jlexer"
+ "reflect"
+ "strings"
+)
+
+var (
+ // ErrInvalidInput indicates the input JSON is invalid
+ ErrInvalidInput = errors.New("invalid JSON input")
+
+	// ErrInvalidValue indicates the target struct has an invalid type
+ ErrInvalidValue = errors.New("unexpected non struct value")
+)
+
+// MultipleLexerError indicates one or more unmarshalling errors during JSON bytes decode
+type MultipleLexerError struct {
+ Errors []*jlexer.LexerError
+}
+
+func (m *MultipleLexerError) Error() string {
+ errs := make([]string, len(m.Errors))
+ for i, lexerError := range m.Errors {
+ errs[i] = lexerError.Error()
+ }
+ return strings.Join(errs, ", ")
+}
+
+// MultipleError indicates one or more unmarshalling errors during JSON map decode
+type MultipleError struct {
+ Errors []error
+}
+
+func (m *MultipleError) Error() string {
+ errs := make([]string, len(m.Errors))
+ for i, lexerError := range m.Errors {
+ errs[i] = lexerError.Error()
+ }
+ return strings.Join(errs, ", ")
+}
+
+// ParseError indicates a JSON map decode error
+type ParseError struct {
+ Reason string
+ Path string
+}
+
+func (p *ParseError) Error() string {
+ return fmt.Sprintf("parse error: %s in %s", p.Reason, p.Path)
+}
+
+func newUnexpectedTypeParseError(expectedType reflect.Type, path []string) *ParseError {
+ return &ParseError{
+ Reason: fmt.Sprintf("expected type %s", externalTypeName(expectedType)),
+ Path: strings.Join(path, "."),
+ }
+}
+
+func newUnsupportedTypeParseError(unsupportedType reflect.Type, path []string) *ParseError {
+ return &ParseError{
+ Reason: fmt.Sprintf("unsupported type %s", externalTypeName(unsupportedType)),
+ Path: strings.Join(path, "."),
+ }
+}
+
+func addUnexpectedTypeLexerError(lexer *jlexer.Lexer, expectedType reflect.Type) {
+ lexer.AddNonFatalError(fmt.Errorf("expected type %s", externalTypeName(expectedType)))
+}
+
+func addUnsupportedTypeLexerError(lexer *jlexer.Lexer, unsupportedType reflect.Type) {
+ lexer.AddNonFatalError(fmt.Errorf("unsupported type %s", externalTypeName(unsupportedType)))
+}
+
+func externalTypeName(t reflect.Type) string {
+ switch t.Kind() {
+ case reflect.String:
+ return "string"
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint,
+ reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, reflect.Float32,
+ reflect.Float64, reflect.Complex64, reflect.Complex128:
+ return "number"
+ case reflect.Bool:
+ return "boolean"
+ case reflect.Array, reflect.Slice:
+ return "array"
+ case reflect.Interface:
+ return "any"
+ case reflect.Map, reflect.Struct:
+ return "object"
+ case reflect.Ptr:
+ return externalTypeName(t.Elem())
+ }
+ return "invalid"
+}
diff --git a/vendor/github.com/perimeterx/marshmallow/options.go b/vendor/github.com/perimeterx/marshmallow/options.go
new file mode 100644
index 00000000..ff97d336
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/options.go
@@ -0,0 +1,96 @@
+// Copyright 2022 PerimeterX. All rights reserved.
+// Use of this source code is governed by a MIT style
+// license that can be found in the LICENSE file.
+
+package marshmallow
+
+// Mode dictates the unmarshalling mode.
+// Each mode is self-documented below.
+type Mode uint8
+
+const (
+ // ModeFailOnFirstError is the default mode. It makes unmarshalling terminate
+ // immediately on any kind of error. This error will then be returned.
+ ModeFailOnFirstError Mode = iota
+
+	// ModeAllowMultipleErrors makes unmarshalling keep decoding even if
+	// errors are encountered. In case of such an error, the erroneous value will be omitted from the result.
+	// Eventually, all errors will be returned, alongside the partial result.
+ ModeAllowMultipleErrors
+
+	// ModeFailOverToOriginalValue makes unmarshalling keep decoding even if
+	// errors are encountered. In case of such an error, the original external value will be placed in the
+	// result data, even though it does not meet the schema requirements.
+ // Eventually, all errors will be returned, alongside the full result. Note that the result map
+ // will contain values that do not match the struct schema.
+ ModeFailOverToOriginalValue
+)
+
+// WithMode is an UnmarshalOption function to set the unmarshalling mode.
+func WithMode(mode Mode) UnmarshalOption {
+ return func(options *unmarshalOptions) {
+ options.mode = mode
+ }
+}
+
+// WithSkipPopulateStruct is an UnmarshalOption function to set the skipPopulateStruct option.
+// The skipPopulateStruct option is false by default.
+// If you do not intend to use the struct value once unmarshalling is finished, set this
+// option to true to boost performance. This means the struct fields will not be set
+// with values; the struct will only be used as the target schema when populating the result map.
+func WithSkipPopulateStruct(skipPopulateStruct bool) UnmarshalOption {
+ return func(options *unmarshalOptions) {
+ options.skipPopulateStruct = skipPopulateStruct
+ }
+}
+
+// WithExcludeKnownFieldsFromMap is an UnmarshalOption function to set the excludeKnownFieldsFromMap option.
+// The exclude known fields flag is false by default.
+// When the flag is set to true, fields specified in the input struct (known fields) will be excluded from the result map.
+func WithExcludeKnownFieldsFromMap(excludeKnownFields bool) UnmarshalOption {
+ return func(options *unmarshalOptions) {
+ options.excludeKnownFieldsFromMap = excludeKnownFields
+ }
+}
+
+type UnmarshalOption func(*unmarshalOptions)
+
+type unmarshalOptions struct {
+ mode Mode
+ skipPopulateStruct bool
+ excludeKnownFieldsFromMap bool
+}
+
+func buildUnmarshalOptions(options []UnmarshalOption) *unmarshalOptions {
+ result := &unmarshalOptions{}
+ for _, option := range options {
+ option(result)
+ }
+ return result
+}
+
+// JSONDataErrorHandler allows types to handle JSON data as maps.
+// Types should implement this interface if they wish to act on the map representation of parsed JSON input.
+// This is mainly used to allow nested objects to capture unknown fields and leverage marshmallow's abilities.
+// If HandleJSONData returns an error, it will be propagated as an unmarshal error.
+type JSONDataErrorHandler interface {
+ HandleJSONData(data map[string]interface{}) error
+}
+
+// Deprecated: use JSONDataErrorHandler instead
+type JSONDataHandler interface {
+ HandleJSONData(data map[string]interface{})
+}
+
+func asJSONDataHandler(value interface{}) (func(map[string]interface{}) error, bool) {
+ if handler, ok := value.(JSONDataErrorHandler); ok {
+ return handler.HandleJSONData, true
+ }
+ if handler, ok := value.(JSONDataHandler); ok {
+ return func(m map[string]interface{}) error {
+ handler.HandleJSONData(m)
+ return nil
+ }, true
+ }
+ return nil, false
+}
diff --git a/vendor/github.com/perimeterx/marshmallow/reflection.go b/vendor/github.com/perimeterx/marshmallow/reflection.go
new file mode 100644
index 00000000..9b7d88ce
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/reflection.go
@@ -0,0 +1,197 @@
+// Copyright 2022 PerimeterX. All rights reserved.
+// Use of this source code is governed by a MIT style
+// license that can be found in the LICENSE file.
+
+package marshmallow
+
+import (
+ "encoding/json"
+ "reflect"
+ "strings"
+)
+
+var unmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
+
+type reflectionInfo struct {
+ path []int
+ t reflect.Type
+}
+
+func (r reflectionInfo) field(target reflect.Value) reflect.Value {
+ current := target
+ for _, i := range r.path {
+ current = current.Field(i)
+ }
+ return current
+}
+
+func mapStructFields(target interface{}) map[string]reflectionInfo {
+ t := reflectStructType(target)
+ result := cacheLookup(t)
+ if result != nil {
+ return result
+ }
+ result = make(map[string]reflectionInfo, t.NumField())
+ mapTypeFields(t, result, nil)
+ cacheStore(t, result)
+ return result
+}
+
+func mapTypeFields(t reflect.Type, result map[string]reflectionInfo, path []int) {
+ num := t.NumField()
+ for i := 0; i < num; i++ {
+ field := t.Field(i)
+ fieldPath := append(path, i)
+ if field.Anonymous && field.Type.Kind() == reflect.Struct {
+ mapTypeFields(field.Type, result, fieldPath)
+ continue
+ }
+ name := field.Tag.Get("json")
+ if name == "" || name == "-" {
+ continue
+ }
+ if index := strings.Index(name, ","); index > -1 {
+ name = name[:index]
+ }
+ result[name] = reflectionInfo{
+ path: fieldPath,
+ t: field.Type,
+ }
+ }
+}
+
+func reflectStructValue(target interface{}) reflect.Value {
+ v := reflect.ValueOf(target)
+ for v.Kind() == reflect.Ptr {
+ v = v.Elem()
+ }
+ return v
+}
+
+func reflectStructType(target interface{}) reflect.Type {
+ t := reflect.TypeOf(target)
+ for t.Kind() == reflect.Ptr {
+ t = t.Elem()
+ }
+ return t
+}
+
+var primitiveConverters = map[reflect.Kind]func(v interface{}) (interface{}, bool){
+ reflect.Bool: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(bool)
+ return res, ok
+ },
+ reflect.Int: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(float64)
+ if ok {
+ return int(res), true
+ }
+ return v, false
+ },
+ reflect.Int8: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(float64)
+ if ok {
+ return int8(res), true
+ }
+ return v, false
+ },
+ reflect.Int16: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(float64)
+ if ok {
+ return int16(res), true
+ }
+ return v, false
+ },
+ reflect.Int32: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(float64)
+ if ok {
+ return int32(res), true
+ }
+ return v, false
+ },
+ reflect.Int64: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(float64)
+ if ok {
+ return int64(res), true
+ }
+ return v, false
+ },
+ reflect.Uint: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(float64)
+ if ok {
+ return uint(res), true
+ }
+ return v, false
+ },
+ reflect.Uint8: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(float64)
+ if ok {
+ return uint8(res), true
+ }
+ return v, false
+ },
+ reflect.Uint16: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(float64)
+ if ok {
+ return uint16(res), true
+ }
+ return v, false
+ },
+ reflect.Uint32: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(float64)
+ if ok {
+ return uint32(res), true
+ }
+ return v, false
+ },
+ reflect.Uint64: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(float64)
+ if ok {
+ return uint64(res), true
+ }
+ return v, false
+ },
+ reflect.Float32: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(float64)
+ if ok {
+ return float32(res), true
+ }
+ return v, false
+ },
+ reflect.Float64: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(float64)
+ if ok {
+ return res, true
+ }
+ return v, false
+ },
+ reflect.Interface: func(v interface{}) (interface{}, bool) {
+ return v, true
+ },
+ reflect.String: func(v interface{}) (interface{}, bool) {
+ res, ok := v.(string)
+ return res, ok
+ },
+}
+
+func assignValue(field reflect.Value, value interface{}) {
+ if value == nil {
+ return
+ }
+ reflectValue := reflect.ValueOf(value)
+ if reflectValue.Type().AssignableTo(field.Type()) {
+ field.Set(reflectValue)
+ }
+}
+
+func isValidValue(v interface{}) bool {
+ value := reflect.ValueOf(v)
+ return value.Kind() == reflect.Ptr && value.Elem().Kind() == reflect.Struct && !value.IsNil()
+}
+
+func safeReflectValue(t reflect.Type, v interface{}) reflect.Value {
+ if v == nil {
+ return reflect.Zero(t)
+ }
+ return reflect.ValueOf(v)
+}
diff --git a/vendor/github.com/perimeterx/marshmallow/unmarshal.go b/vendor/github.com/perimeterx/marshmallow/unmarshal.go
new file mode 100644
index 00000000..160ea30c
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/unmarshal.go
@@ -0,0 +1,383 @@
+// Copyright 2022 PerimeterX. All rights reserved.
+// Use of this source code is governed by a MIT style
+// license that can be found in the LICENSE file.
+
+package marshmallow
+
+import (
+ "encoding/json"
+ "github.com/mailru/easyjson/jlexer"
+ "reflect"
+)
+
+// Unmarshal parses the JSON-encoded object in data and stores the values
+// in the struct pointed to by v and in the returned map.
+// If v is nil or not a pointer to a struct, Unmarshal returns an ErrInvalidValue.
+// If data is not valid JSON or not a JSON object, Unmarshal returns an ErrInvalidInput.
+//
+// Unmarshal follows the rules of json.Unmarshal with the following exceptions:
+// - All input fields are stored in the resulting map, including fields that do not exist in the
+// struct pointed to by v.
+// - Unmarshal only operates on JSON object inputs. It will reject all other types of input
+// by returning ErrInvalidInput.
+// - Unmarshal only operates on struct values. It will reject all other types of v by
+// returning ErrInvalidValue.
+// - Unmarshal supports three types of Mode values. Each mode is self-documented and affects
+// how Unmarshal behaves.
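+//
+// A minimal usage sketch (the field names and input below are illustrative only):
+//
+//	v := struct {
+//		Foo string `json:"foo"`
+//	}{}
+//	result, err := Unmarshal([]byte(`{"foo":"bar","goo":12.6}`), &v)
+//	// v.Foo == "bar"; result contains both "foo" and "goo"; err is nil.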
+func Unmarshal(data []byte, v interface{}, options ...UnmarshalOption) (map[string]interface{}, error) {
+ if !isValidValue(v) {
+ return nil, ErrInvalidValue
+ }
+ opts := buildUnmarshalOptions(options)
+ useMultipleErrors := opts.mode == ModeAllowMultipleErrors || opts.mode == ModeFailOverToOriginalValue
+ d := &decoder{options: opts, lexer: &jlexer.Lexer{Data: data, UseMultipleErrors: useMultipleErrors}}
+ result := make(map[string]interface{})
+ if d.lexer.IsNull() {
+ d.lexer.Skip()
+ } else if !d.lexer.IsDelim('{') {
+ return nil, ErrInvalidInput
+ } else {
+ d.populateStruct(false, v, result)
+ }
+ d.lexer.Consumed()
+ if useMultipleErrors {
+ errors := d.lexer.GetNonFatalErrors()
+ if len(errors) == 0 {
+ return result, nil
+ }
+ return result, &MultipleLexerError{Errors: errors}
+ }
+ err := d.lexer.Error()
+ if err != nil {
+ return nil, err
+ }
+ return result, nil
+}
+
+type decoder struct {
+ options *unmarshalOptions
+ lexer *jlexer.Lexer
+}
+
+func (d *decoder) populateStruct(forcePopulate bool, structInstance interface{}, result map[string]interface{}) (interface{}, bool) {
+ doPopulate := !d.options.skipPopulateStruct || forcePopulate
+ var structValue reflect.Value
+ if doPopulate {
+ structValue = reflectStructValue(structInstance)
+ }
+ fields := mapStructFields(structInstance)
+ var clone map[string]interface{}
+ if d.options.mode == ModeFailOverToOriginalValue {
+ clone = make(map[string]interface{}, len(fields))
+ }
+ d.lexer.Delim('{')
+ for !d.lexer.IsDelim('}') {
+ key := d.lexer.UnsafeFieldName(false)
+ d.lexer.WantColon()
+ refInfo, exists := fields[key]
+ if exists {
+ value, isValidType := d.valueByReflectType(refInfo.t)
+ if isValidType {
+ if value != nil && doPopulate {
+ field := refInfo.field(structValue)
+ assignValue(field, value)
+ }
+ if !d.options.excludeKnownFieldsFromMap {
+ if result != nil {
+ result[key] = value
+ }
+ if clone != nil {
+ clone[key] = value
+ }
+ }
+ } else {
+ switch d.options.mode {
+ case ModeFailOnFirstError:
+ return nil, false
+ case ModeFailOverToOriginalValue:
+ if !forcePopulate {
+ result[key] = value
+ } else {
+ clone[key] = value
+ d.lexer.WantComma()
+ d.drainLexerMap(clone)
+ return clone, false
+ }
+ }
+ }
+ } else {
+ value := d.lexer.Interface()
+ if result != nil {
+ result[key] = value
+ }
+ if clone != nil {
+ clone[key] = value
+ }
+ }
+ d.lexer.WantComma()
+ }
+ d.lexer.Delim('}')
+ return structInstance, true
+}
+
+func (d *decoder) valueByReflectType(t reflect.Type) (interface{}, bool) {
+ if t.Implements(unmarshalerType) {
+ result := reflect.New(t.Elem()).Interface()
+ d.valueFromCustomUnmarshaler(result.(json.Unmarshaler))
+ return result, true
+ }
+ if reflect.PtrTo(t).Implements(unmarshalerType) {
+ value := reflect.New(t)
+ d.valueFromCustomUnmarshaler(value.Interface().(json.Unmarshaler))
+ return value.Elem().Interface(), true
+ }
+ kind := t.Kind()
+ if converter := primitiveConverters[kind]; converter != nil {
+ v := d.lexer.Interface()
+ if v == nil {
+ return nil, true
+ }
+ converted, ok := converter(v)
+ if !ok {
+ addUnexpectedTypeLexerError(d.lexer, t)
+ return v, false
+ }
+ return converted, true
+ }
+ switch kind {
+ case reflect.Slice:
+ return d.buildSlice(t)
+ case reflect.Array:
+ return d.buildArray(t)
+ case reflect.Map:
+ return d.buildMap(t)
+ case reflect.Struct:
+ value, valid := d.buildStruct(t)
+ if value == nil {
+ return nil, valid
+ }
+ if !valid {
+ return value, false
+ }
+ return reflect.ValueOf(value).Elem().Interface(), valid
+ case reflect.Ptr:
+ if t.Elem().Kind() == reflect.Struct {
+ return d.buildStruct(t.Elem())
+ }
+ value, valid := d.valueByReflectType(t.Elem())
+ if value == nil {
+ return nil, valid
+ }
+ if !valid {
+ return value, false
+ }
+ result := reflect.New(reflect.TypeOf(value))
+ result.Elem().Set(reflect.ValueOf(value))
+ return result.Interface(), valid
+ }
+ addUnsupportedTypeLexerError(d.lexer, t)
+ return nil, false
+}
+
+func (d *decoder) buildSlice(sliceType reflect.Type) (interface{}, bool) {
+ if d.lexer.IsNull() {
+ d.lexer.Skip()
+ return nil, true
+ }
+ if !d.lexer.IsDelim('[') {
+ addUnexpectedTypeLexerError(d.lexer, sliceType)
+ return d.lexer.Interface(), false
+ }
+ elemType := sliceType.Elem()
+ d.lexer.Delim('[')
+ var sliceValue reflect.Value
+ if !d.lexer.IsDelim(']') {
+ sliceValue = reflect.MakeSlice(sliceType, 0, 4)
+ } else {
+ sliceValue = reflect.MakeSlice(sliceType, 0, 0)
+ }
+ for !d.lexer.IsDelim(']') {
+ current, valid := d.valueByReflectType(elemType)
+ if !valid {
+ if d.options.mode != ModeFailOverToOriginalValue {
+ d.drainLexerArray(nil)
+ return nil, true
+ }
+ result := d.cloneReflectArray(sliceValue, -1)
+ result = append(result, current)
+ return d.drainLexerArray(result), true
+ }
+ sliceValue = reflect.Append(sliceValue, safeReflectValue(elemType, current))
+ d.lexer.WantComma()
+ }
+ d.lexer.Delim(']')
+ return sliceValue.Interface(), true
+}
+
+func (d *decoder) buildArray(arrayType reflect.Type) (interface{}, bool) {
+ if d.lexer.IsNull() {
+ d.lexer.Skip()
+ return nil, true
+ }
+ if !d.lexer.IsDelim('[') {
+ addUnexpectedTypeLexerError(d.lexer, arrayType)
+ return d.lexer.Interface(), false
+ }
+ elemType := arrayType.Elem()
+ arrayValue := reflect.New(arrayType).Elem()
+ d.lexer.Delim('[')
+ for i := 0; !d.lexer.IsDelim(']'); i++ {
+ current, valid := d.valueByReflectType(elemType)
+ if !valid {
+ if d.options.mode != ModeFailOverToOriginalValue {
+ d.drainLexerArray(nil)
+ return nil, true
+ }
+ result := d.cloneReflectArray(arrayValue, i)
+ result = append(result, current)
+ return d.drainLexerArray(result), true
+ }
+ if current != nil {
+ arrayValue.Index(i).Set(reflect.ValueOf(current))
+ }
+ d.lexer.WantComma()
+ }
+ d.lexer.Delim(']')
+ return arrayValue.Interface(), true
+}
+
+func (d *decoder) buildMap(mapType reflect.Type) (interface{}, bool) {
+ if d.lexer.IsNull() {
+ d.lexer.Skip()
+ return nil, true
+ }
+ if !d.lexer.IsDelim('{') {
+ addUnexpectedTypeLexerError(d.lexer, mapType)
+ return d.lexer.Interface(), false
+ }
+ d.lexer.Delim('{')
+ keyType := mapType.Key()
+ valueType := mapType.Elem()
+ mapValue := reflect.MakeMap(mapType)
+ for !d.lexer.IsDelim('}') {
+ key, valid := d.valueByReflectType(keyType)
+ if !valid {
+ if d.options.mode != ModeFailOverToOriginalValue {
+ d.lexer.WantColon()
+ d.lexer.Interface()
+ d.lexer.WantComma()
+ d.drainLexerMap(make(map[string]interface{}))
+ return nil, true
+ }
+ strKey, _ := key.(string)
+ d.lexer.WantColon()
+ value := d.lexer.Interface()
+ result := d.cloneReflectMap(mapValue)
+ result[strKey] = value
+ d.lexer.WantComma()
+ d.drainLexerMap(result)
+ return result, true
+ }
+ d.lexer.WantColon()
+ value, valid := d.valueByReflectType(valueType)
+ if !valid {
+ if d.options.mode != ModeFailOverToOriginalValue {
+ d.lexer.WantComma()
+ d.drainLexerMap(make(map[string]interface{}))
+ return nil, true
+ }
+ strKey, _ := key.(string)
+ result := d.cloneReflectMap(mapValue)
+ result[strKey] = value
+ d.lexer.WantComma()
+ d.drainLexerMap(result)
+ return result, true
+ }
+ mapValue.SetMapIndex(safeReflectValue(keyType, key), safeReflectValue(valueType, value))
+ d.lexer.WantComma()
+ }
+ d.lexer.Delim('}')
+ return mapValue.Interface(), true
+}
+
+func (d *decoder) buildStruct(structType reflect.Type) (interface{}, bool) {
+ if d.lexer.IsNull() {
+ d.lexer.Skip()
+ return nil, true
+ }
+ if !d.lexer.IsDelim('{') {
+ addUnexpectedTypeLexerError(d.lexer, structType)
+ return d.lexer.Interface(), false
+ }
+ value := reflect.New(structType).Interface()
+ handler, ok := asJSONDataHandler(value)
+ if !ok {
+ return d.populateStruct(true, value, nil)
+ }
+ data := make(map[string]interface{})
+ result, valid := d.populateStruct(true, value, data)
+ if !valid {
+ return result, false
+ }
+ err := handler(data)
+ if err != nil {
+ d.lexer.AddNonFatalError(err)
+ return result, false
+ }
+ return result, true
+}
+
+func (d *decoder) valueFromCustomUnmarshaler(unmarshaler json.Unmarshaler) {
+ data := d.lexer.Raw()
+ if !d.lexer.Ok() {
+ return
+ }
+ err := unmarshaler.UnmarshalJSON(data)
+ if err != nil {
+ d.lexer.AddNonFatalError(err)
+ }
+}
+
+func (d *decoder) cloneReflectArray(value reflect.Value, length int) []interface{} {
+ if length == -1 {
+ length = value.Len()
+ }
+ result := make([]interface{}, length)
+ for i := 0; i < length; i++ {
+ result[i] = value.Index(i).Interface()
+ }
+ return result
+}
+
+func (d *decoder) cloneReflectMap(mapValue reflect.Value) map[string]interface{} {
+ l := mapValue.Len()
+ result := make(map[string]interface{}, l)
+ for _, key := range mapValue.MapKeys() {
+ value := mapValue.MapIndex(key)
+ strKey, _ := key.Interface().(string)
+ result[strKey] = value.Interface()
+ }
+ return result
+}
+
+func (d *decoder) drainLexerArray(target []interface{}) interface{} {
+ d.lexer.WantComma()
+ for !d.lexer.IsDelim(']') {
+ current := d.lexer.Interface()
+ target = append(target, current)
+ d.lexer.WantComma()
+ }
+ d.lexer.Delim(']')
+ return target
+}
+
+func (d *decoder) drainLexerMap(target map[string]interface{}) {
+ for !d.lexer.IsDelim('}') {
+ key := d.lexer.String()
+ d.lexer.WantColon()
+ value := d.lexer.Interface()
+ target[key] = value
+ d.lexer.WantComma()
+ }
+ d.lexer.Delim('}')
+}
diff --git a/vendor/github.com/perimeterx/marshmallow/unmarshal_from_json_map.go b/vendor/github.com/perimeterx/marshmallow/unmarshal_from_json_map.go
new file mode 100644
index 00000000..0907f8f8
--- /dev/null
+++ b/vendor/github.com/perimeterx/marshmallow/unmarshal_from_json_map.go
@@ -0,0 +1,295 @@
+// Copyright 2022 PerimeterX. All rights reserved.
+// Use of this source code is governed by a MIT style
+// license that can be found in the LICENSE file.
+
+package marshmallow
+
+import (
+ "reflect"
+)
+
+// UnmarshalerFromJSONMap is the interface implemented by types
+// that can unmarshal a JSON description of themselves.
+// In case you want to implement custom unmarshalling, json.Unmarshaler only supports
+// receiving the data as []byte. However, while unmarshalling from a JSON map,
+// the data is not available as a raw []byte and converting it would significantly
+// hurt performance. Thus, if you wish to implement custom unmarshalling on a type
+// that is being unmarshalled from a JSON map, you need to implement the
+// UnmarshalerFromJSONMap interface.
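+//
+// A minimal sketch of implementing it (the Timestamp type below is hypothetical):
+//
+//	type Timestamp struct{ Seconds int64 }
+//
+//	func (t *Timestamp) UnmarshalJSONFromMap(data interface{}) error {
+//		f, ok := data.(float64)
+//		if !ok {
+//			return fmt.Errorf("expected number, got %T", data)
+//		}
+//		t.Seconds = int64(f)
+//		return nil
+//	}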
+type UnmarshalerFromJSONMap interface {
+ UnmarshalJSONFromMap(data interface{}) error
+}
+
+// UnmarshalFromJSONMap parses the JSON map data and stores the values
+// in the struct pointed to by v and in the returned map.
+// If v is nil or not a pointer to a struct, UnmarshalFromJSONMap returns an ErrInvalidValue.
+//
+// UnmarshalFromJSONMap follows the rules of json.Unmarshal with the following exceptions:
+// - All input fields are stored in the resulting map, including fields that do not exist in the
+// struct pointed to by v.
+// - UnmarshalFromJSONMap receives a JSON map instead of raw bytes. The given input map is assumed
+// to be a JSON map, meaning it should only contain the following types: bool, string, float64,
+// []interface{}, and map[string]interface{}. Other types will cause decoding to return unexpected results.
+// - UnmarshalFromJSONMap only operates on struct values. It will reject all other types of v by
+// returning ErrInvalidValue.
+// - UnmarshalFromJSONMap supports three types of Mode values. Each mode is self-documented and affects
+// how UnmarshalFromJSONMap behaves.
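+//
+// A minimal usage sketch (the field names and input below are illustrative only):
+//
+//	v := struct {
+//		Foo string `json:"foo"`
+//	}{}
+//	input := map[string]interface{}{"foo": "bar", "goo": 12.6}
+//	result, err := UnmarshalFromJSONMap(input, &v)
+//	// v.Foo == "bar"; result contains both "foo" and "goo"; err is nil.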
+func UnmarshalFromJSONMap(data map[string]interface{}, v interface{}, options ...UnmarshalOption) (map[string]interface{}, error) {
+ if !isValidValue(v) {
+ return nil, ErrInvalidValue
+ }
+ opts := buildUnmarshalOptions(options)
+ d := &mapDecoder{options: opts}
+ result := make(map[string]interface{})
+ if data != nil {
+ d.populateStruct(false, nil, data, v, result)
+ }
+ if opts.mode == ModeAllowMultipleErrors || opts.mode == ModeFailOverToOriginalValue {
+ if len(d.errs) == 0 {
+ return result, nil
+ }
+ return result, &MultipleError{Errors: d.errs}
+ }
+ if d.err != nil {
+ return nil, d.err
+ }
+ return result, nil
+}
+
+var unmarshalerFromJSONMapType = reflect.TypeOf((*UnmarshalerFromJSONMap)(nil)).Elem()
+
+type mapDecoder struct {
+ options *unmarshalOptions
+ err error
+ errs []error
+}
+
+func (m *mapDecoder) populateStruct(forcePopulate bool, path []string, data map[string]interface{}, structInstance interface{}, result map[string]interface{}) (interface{}, bool) {
+ doPopulate := !m.options.skipPopulateStruct || forcePopulate
+ var structValue reflect.Value
+ if doPopulate {
+ structValue = reflectStructValue(structInstance)
+ }
+ fields := mapStructFields(structInstance)
+ for key, inputValue := range data {
+ refInfo, exists := fields[key]
+ if exists {
+ value, isValidType := m.valueByReflectType(append(path, key), inputValue, refInfo.t)
+ if isValidType {
+ if value != nil && doPopulate {
+ field := refInfo.field(structValue)
+ assignValue(field, value)
+ }
+ if !m.options.excludeKnownFieldsFromMap {
+ if result != nil {
+ result[key] = value
+ }
+ }
+ } else {
+ switch m.options.mode {
+ case ModeFailOnFirstError:
+ return nil, false
+ case ModeFailOverToOriginalValue:
+ if !forcePopulate {
+ result[key] = value
+ } else {
+ return data, false
+ }
+ }
+ }
+ } else {
+ if result != nil {
+ result[key] = inputValue
+ }
+ }
+ }
+ return structInstance, true
+}
+
+func (m *mapDecoder) valueByReflectType(path []string, v interface{}, t reflect.Type) (interface{}, bool) {
+ if t.Implements(unmarshalerFromJSONMapType) {
+ result := reflect.New(t.Elem()).Interface()
+ m.valueFromCustomUnmarshaler(v, result.(UnmarshalerFromJSONMap))
+ return result, true
+ }
+ if reflect.PtrTo(t).Implements(unmarshalerFromJSONMapType) {
+ value := reflect.New(t)
+ m.valueFromCustomUnmarshaler(v, value.Interface().(UnmarshalerFromJSONMap))
+ return value.Elem().Interface(), true
+ }
+ kind := t.Kind()
+ if converter := primitiveConverters[kind]; converter != nil {
+ if v == nil {
+ return nil, true
+ }
+ converted, ok := converter(v)
+ if !ok {
+ m.addError(newUnexpectedTypeParseError(t, path))
+ return v, false
+ }
+ return converted, true
+ }
+ switch kind {
+ case reflect.Slice:
+ return m.buildSlice(path, v, t)
+ case reflect.Array:
+ return m.buildArray(path, v, t)
+ case reflect.Map:
+ return m.buildMap(path, v, t)
+ case reflect.Struct:
+ value, valid := m.buildStruct(path, v, t)
+ if value == nil {
+ return nil, valid
+ }
+ if !valid {
+ return value, false
+ }
+ return reflect.ValueOf(value).Elem().Interface(), valid
+ case reflect.Ptr:
+ if t.Elem().Kind() == reflect.Struct {
+ return m.buildStruct(path, v, t.Elem())
+ }
+ value, valid := m.valueByReflectType(path, v, t.Elem())
+ if value == nil {
+ return nil, valid
+ }
+ if !valid {
+ return value, false
+ }
+ result := reflect.New(reflect.TypeOf(value))
+ result.Elem().Set(reflect.ValueOf(value))
+ return result.Interface(), valid
+ }
+ m.addError(newUnsupportedTypeParseError(t, path))
+ return nil, false
+}
+
+func (m *mapDecoder) buildSlice(path []string, v interface{}, sliceType reflect.Type) (interface{}, bool) {
+ if v == nil {
+ return nil, true
+ }
+ arr, ok := v.([]interface{})
+ if !ok {
+ m.addError(newUnexpectedTypeParseError(sliceType, path))
+ return v, false
+ }
+ elemType := sliceType.Elem()
+ var sliceValue reflect.Value
+ if len(arr) > 0 {
+ sliceValue = reflect.MakeSlice(sliceType, 0, 4)
+ } else {
+ sliceValue = reflect.MakeSlice(sliceType, 0, 0)
+ }
+ for _, element := range arr {
+ current, valid := m.valueByReflectType(path, element, elemType)
+ if !valid {
+ if m.options.mode != ModeFailOverToOriginalValue {
+ return nil, true
+ }
+ return v, true
+ }
+ sliceValue = reflect.Append(sliceValue, safeReflectValue(elemType, current))
+ }
+ return sliceValue.Interface(), true
+}
+
+func (m *mapDecoder) buildArray(path []string, v interface{}, arrayType reflect.Type) (interface{}, bool) {
+ if v == nil {
+ return nil, true
+ }
+ arr, ok := v.([]interface{})
+ if !ok {
+ m.addError(newUnexpectedTypeParseError(arrayType, path))
+ return v, false
+ }
+ elemType := arrayType.Elem()
+ arrayValue := reflect.New(arrayType).Elem()
+ for i, element := range arr {
+ current, valid := m.valueByReflectType(path, element, elemType)
+ if !valid {
+ if m.options.mode != ModeFailOverToOriginalValue {
+ return nil, true
+ }
+ return v, true
+ }
+ if current != nil {
+ arrayValue.Index(i).Set(reflect.ValueOf(current))
+ }
+ }
+ return arrayValue.Interface(), true
+}
+
+func (m *mapDecoder) buildMap(path []string, v interface{}, mapType reflect.Type) (interface{}, bool) {
+ if v == nil {
+ return nil, true
+ }
+ mp, ok := v.(map[string]interface{})
+ if !ok {
+ m.addError(newUnexpectedTypeParseError(mapType, path))
+ return v, false
+ }
+ keyType := mapType.Key()
+ valueType := mapType.Elem()
+ mapValue := reflect.MakeMap(mapType)
+ for inputKey, inputValue := range mp {
+ keyPath := append(path, inputKey)
+ key, valid := m.valueByReflectType(keyPath, inputKey, keyType)
+ if !valid {
+ if m.options.mode != ModeFailOverToOriginalValue {
+ return nil, true
+ }
+ return v, true
+ }
+ value, valid := m.valueByReflectType(keyPath, inputValue, valueType)
+ if !valid {
+ if m.options.mode != ModeFailOverToOriginalValue {
+ return nil, true
+ }
+ return v, true
+ }
+ mapValue.SetMapIndex(safeReflectValue(keyType, key), safeReflectValue(valueType, value))
+ }
+ return mapValue.Interface(), true
+}
+
+func (m *mapDecoder) buildStruct(path []string, v interface{}, structType reflect.Type) (interface{}, bool) {
+ if v == nil {
+ return nil, true
+ }
+ mp, ok := v.(map[string]interface{})
+ if !ok {
+ m.addError(newUnexpectedTypeParseError(structType, path))
+ return v, false
+ }
+ value := reflect.New(structType).Interface()
+ handler, ok := asJSONDataHandler(value)
+ if !ok {
+ return m.populateStruct(true, path, mp, value, nil)
+ }
+ data := make(map[string]interface{})
+ result, valid := m.populateStruct(true, path, mp, value, data)
+ if !valid {
+ return result, false
+ }
+ err := handler(data)
+ if err != nil {
+ m.addError(err)
+ return result, false
+ }
+ return result, true
+}
+
+func (m *mapDecoder) valueFromCustomUnmarshaler(data interface{}, unmarshaler UnmarshalerFromJSONMap) {
+ err := unmarshaler.UnmarshalJSONFromMap(data)
+ if err != nil {
+ m.addError(err)
+ }
+}
+
+func (m *mapDecoder) addError(err error) {
+ if m.options.mode == ModeFailOnFirstError {
+ m.err = err
+ } else {
+ m.errs = append(m.errs, err)
+ }
+}
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 2f63812b..e81de125 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -78,6 +78,15 @@ github.com/dlclark/regexp2/syntax
# github.com/fsnotify/fsnotify v1.7.0
## explicit; go 1.17
github.com/fsnotify/fsnotify
+# github.com/getkin/kin-openapi v0.124.0
+## explicit; go 1.20
+github.com/getkin/kin-openapi/openapi3
+# github.com/go-openapi/jsonpointer v0.20.2
+## explicit; go 1.19
+github.com/go-openapi/jsonpointer
+# github.com/go-openapi/swag v0.22.8
+## explicit; go 1.19
+github.com/go-openapi/swag
# github.com/golang/protobuf v1.5.3
## explicit; go 1.9
github.com/golang/protobuf/proto
@@ -102,6 +111,12 @@ github.com/hashicorp/hcl/json/token
# github.com/inconshreveable/mousetrap v1.1.0
## explicit; go 1.18
github.com/inconshreveable/mousetrap
+# github.com/invopop/yaml v0.2.0
+## explicit; go 1.14
+github.com/invopop/yaml
+# github.com/josharian/intern v1.0.0
+## explicit; go 1.5
+github.com/josharian/intern
# github.com/launchdarkly/api-client-go/v14 v14.0.0
## explicit; go 1.13
github.com/launchdarkly/api-client-go/v14
@@ -111,6 +126,11 @@ github.com/lucasb-eyer/go-colorful
# github.com/magiconair/properties v1.8.7
## explicit; go 1.19
github.com/magiconair/properties
+# github.com/mailru/easyjson v0.7.7
+## explicit; go 1.12
+github.com/mailru/easyjson/buffer
+github.com/mailru/easyjson/jlexer
+github.com/mailru/easyjson/jwriter
# github.com/mattn/go-isatty v0.0.18
## explicit; go 1.15
github.com/mattn/go-isatty
@@ -130,6 +150,9 @@ github.com/mitchellh/go-homedir
# github.com/mitchellh/mapstructure v1.5.0
## explicit; go 1.14
github.com/mitchellh/mapstructure
+# github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826
+## explicit
+github.com/mohae/deepcopy
# github.com/muesli/ansi v0.0.0-20211018074035-2e021307bc4b
## explicit; go 1.17
github.com/muesli/ansi
@@ -158,6 +181,9 @@ github.com/pelletier/go-toml/v2/internal/characters
github.com/pelletier/go-toml/v2/internal/danger
github.com/pelletier/go-toml/v2/internal/tracker
github.com/pelletier/go-toml/v2/unstable
+# github.com/perimeterx/marshmallow v1.1.5
+## explicit; go 1.17
+github.com/perimeterx/marshmallow
# github.com/pkg/errors v0.9.1
## explicit
github.com/pkg/errors