From fd8a724e77123d5035d4070581dab777b4533ebb Mon Sep 17 00:00:00 2001
From: tobi <31960611+tsmethurst@users.noreply.github.com>
Date: Fri, 26 Apr 2024 11:31:10 +0200
Subject: [PATCH] [chore] Bump go swagger (#2871)

* bump go swagger version

* bump swagger version
---
 .drone.yml | 8 +-
 go.mod | 40 +-
 go.sum | 189 +-
 .../Masterminds/semver/v3/.golangci.yml | 3 -
 .../github.com/Masterminds/semver/v3/Makefile | 17 +-
 .../Masterminds/semver/v3/README.md | 22 +-
 .../Masterminds/semver/v3/SECURITY.md | 19 +
 .../Masterminds/semver/v3/constraints.go | 2 +-
 .../github.com/Masterminds/semver/v3/fuzz.go | 22 -
 .../github.com/felixge/httpsnoop/.travis.yml | 6 -
 vendor/github.com/felixge/httpsnoop/Makefile | 2 +-
 vendor/github.com/felixge/httpsnoop/README.md | 4 +-
 .../felixge/httpsnoop/capture_metrics.go | 2 +-
 .../httpsnoop/wrap_generated_gteq_1.8.go | 2 +-
 .../httpsnoop/wrap_generated_lt_1.8.go | 2 +-
 .../go-openapi/analysis/.golangci.yml | 53 +-
 .../github.com/go-openapi/analysis/README.md | 10 +-
 .../go-openapi/analysis/appveyor.yml | 32 -
 vendor/github.com/go-openapi/analysis/doc.go | 10 +-
 .../github.com/go-openapi/analysis/flatten.go | 64 +-
 .../go-openapi/analysis/flatten_name.go | 39 +-
 .../go-openapi/analysis/flatten_options.go | 1 +
 .../analysis/internal/debug/debug.go | 4 +-
 .../internal/flatten/replace/replace.go | 42 +-
 .../analysis/internal/flatten/sortref/keys.go | 2 +-
 .../github.com/go-openapi/analysis/mixin.go | 16 +-
 .../github.com/go-openapi/analysis/schema.go | 12 +-
 .../go-openapi/errors/.golangci.yml | 52 +-
 vendor/github.com/go-openapi/errors/README.md | 5 +-
 vendor/github.com/go-openapi/errors/api.go | 18 +-
 vendor/github.com/go-openapi/errors/schema.go | 6 +-
 .../github.com/go-openapi/inflect/.gitignore | 5 +
 .../go-openapi/inflect/.golangci.yml | 61 +
 .../github.com/go-openapi/inflect/.hgignore | 1 -
 .../go-openapi/inflect/{LICENCE => LICENSE} | 0
 vendor/github.com/go-openapi/inflect/README | 168 --
 .../github.com/go-openapi/inflect/README.md | 18 +
 .../github.com/go-openapi/inflect/inflect.go | 37 +-
 .../go-openapi/jsonpointer/.golangci.yml | 61 +
 .../go-openapi/jsonpointer/README.md | 8 +-
 .../go-openapi/jsonpointer/pointer.go | 191 +-
 .../go-openapi/jsonreference/.golangci.yml | 59 +-
 .../go-openapi/jsonreference/README.md | 14 +-
 .../github.com/go-openapi/loads/.golangci.yml | 49 +-
 vendor/github.com/go-openapi/loads/README.md | 2 +-
 vendor/github.com/go-openapi/loads/doc.go | 9 +-
 vendor/github.com/go-openapi/loads/loaders.go | 9 +-
 vendor/github.com/go-openapi/loads/spec.go | 35 +-
 .../go-openapi/runtime/.golangci.yml | 56 +-
 .../github.com/go-openapi/runtime/README.md | 11 +-
 .../go-openapi/runtime/bytestream.go | 149 +-
 .../go-openapi/runtime/client_operation.go | 4 +-
 .../go-openapi/runtime/client_request.go | 4 +-
 vendor/github.com/go-openapi/runtime/csv.go | 337 +++-
 .../go-openapi/runtime/csv_options.go | 121 ++
 .../go-openapi/runtime/logger/standard.go | 2 +
 .../go-openapi/runtime/middleware/context.go | 179 +-
 .../runtime/middleware/denco/router.go | 37 +-
 .../go-openapi/runtime/middleware/doc.go | 73 +-
 .../go-openapi/runtime/middleware/go18.go | 9 -
 .../runtime/middleware/header/header.go | 9 +-
 .../runtime/middleware/parameter.go | 22 +-
 .../go-openapi/runtime/middleware/pre_go18.go | 9 -
 .../go-openapi/runtime/middleware/rapidoc.go | 72 +-
 .../go-openapi/runtime/middleware/redoc.go | 69 +-
 .../go-openapi/runtime/middleware/request.go | 23 +-
 .../go-openapi/runtime/middleware/router.go | 109 +-
 .../go-openapi/runtime/middleware/spec.go | 78 +-
.../runtime/middleware/swaggerui.go | 87 +- .../runtime/middleware/swaggerui_oauth2.go | 31 +- .../runtime/middleware/ui_options.go | 173 ++ .../runtime/middleware/untyped/api.go | 15 +- .../runtime/middleware/validation.go | 12 +- .../github.com/go-openapi/runtime/request.go | 14 +- .../runtime/security/authenticator.go | 19 +- vendor/github.com/go-openapi/spec/.gitignore | 3 +- .../github.com/go-openapi/spec/.golangci.yml | 21 +- vendor/github.com/go-openapi/spec/README.md | 28 +- .../github.com/go-openapi/spec/appveyor.yml | 32 - vendor/github.com/go-openapi/spec/bindata.go | 297 --- vendor/github.com/go-openapi/spec/embed.go | 17 + vendor/github.com/go-openapi/spec/expander.go | 77 +- .../go-openapi/spec/normalizer_nonwindows.go | 2 +- .../github.com/go-openapi/spec/operation.go | 5 +- .../github.com/go-openapi/spec/parameter.go | 42 +- .../go-openapi/spec/schema_loader.go | 9 +- .../spec/schemas/jsonschema-draft-04.json | 149 ++ .../go-openapi/spec/schemas/v2/schema.json | 1607 +++++++++++++++++ vendor/github.com/go-openapi/spec/spec.go | 6 +- vendor/github.com/go-openapi/spec/swagger.go | 4 +- vendor/github.com/go-openapi/spec/url_go18.go | 8 - vendor/github.com/go-openapi/spec/url_go19.go | 3 - .../go-openapi/strfmt/.golangci.yml | 92 +- vendor/github.com/go-openapi/strfmt/README.md | 5 +- vendor/github.com/go-openapi/strfmt/bson.go | 6 +- .../github.com/go-openapi/strfmt/default.go | 56 +- vendor/github.com/go-openapi/strfmt/format.go | 5 +- vendor/github.com/go-openapi/strfmt/time.go | 8 +- vendor/github.com/go-openapi/swag/.gitignore | 1 + .../github.com/go-openapi/swag/.golangci.yml | 54 +- .../github.com/go-openapi/swag/BENCHMARK.md | 52 + vendor/github.com/go-openapi/swag/README.md | 8 +- .../go-openapi/swag/initialism_index.go | 202 +++ vendor/github.com/go-openapi/swag/loading.go | 105 +- .../github.com/go-openapi/swag/name_lexem.go | 72 +- .../github.com/go-openapi/swag/post_go18.go | 24 - .../github.com/go-openapi/swag/post_go19.go | 68 - vendor/github.com/go-openapi/swag/pre_go18.go | 24 - vendor/github.com/go-openapi/swag/pre_go19.go | 70 - vendor/github.com/go-openapi/swag/split.go | 490 +++-- .../go-openapi/swag/string_bytes.go | 8 + vendor/github.com/go-openapi/swag/util.go | 210 +-- vendor/github.com/go-openapi/swag/yaml.go | 39 +- .../go-openapi/validate/.golangci.yml | 55 +- .../go-openapi/validate/BENCHMARK.md | 31 + .../github.com/go-openapi/validate/README.md | 8 +- .../go-openapi/validate/appveyor.yml | 32 - .../go-openapi/validate/default_validator.go | 109 +- vendor/github.com/go-openapi/validate/doc.go | 70 +- .../go-openapi/validate/example_validator.go | 67 +- .../github.com/go-openapi/validate/formats.go | 78 +- .../github.com/go-openapi/validate/helpers.go | 23 +- .../go-openapi/validate/object_validator.go | 480 +++-- .../github.com/go-openapi/validate/options.go | 21 +- .../github.com/go-openapi/validate/pools.go | 366 ++++ .../go-openapi/validate/pools_debug.go | 1012 +++++++++++ .../github.com/go-openapi/validate/result.go | 131 +- .../github.com/go-openapi/validate/schema.go | 260 ++- .../go-openapi/validate/schema_option.go | 31 +- .../go-openapi/validate/schema_props.go | 436 +++-- .../go-openapi/validate/slice_validator.go | 57 +- vendor/github.com/go-openapi/validate/spec.go | 188 +- .../go-openapi/validate/spec_messages.go | 12 +- vendor/github.com/go-openapi/validate/type.go | 72 +- .../go-openapi/validate/validator.go | 940 +++++++--- .../github.com/go-openapi/validate/values.go | 12 +- .../go-swagger/cmd/swagger/commands/diff.go | 3 +- 
.../cmd/swagger/commands/diff/checks.go | 10 +- .../swagger/commands/diff/compatibility.go | 5 +- .../cmd/swagger/commands/diff/difftypes.go | 4 + .../swagger/commands/diff/spec_analyser.go | 110 +- .../go-swagger/cmd/swagger/commands/expand.go | 5 +- .../cmd/swagger/commands/generate/model.go | 2 + .../cmd/swagger/commands/generate/shared.go | 4 +- .../cmd/swagger/commands/generate/spec.go | 25 +- .../swagger/commands/generate/spec_go111.go | 119 -- .../go-swagger/cmd/swagger/commands/serve.go | 2 +- .../go-swagger/codescan/application.go | 10 +- .../go-swagger/codescan/operations.go | 4 +- .../go-swagger/codescan/parameters.go | 15 +- .../go-swagger/go-swagger/codescan/parser.go | 34 +- .../go-swagger/codescan/parser_helpers.go | 3 - .../codescan/parser_helpers_go118.go | 42 - .../go-swagger/codescan/responses.go | 56 +- .../go-swagger/go-swagger/codescan/routes.go | 3 +- .../go-swagger/go-swagger/codescan/schema.go | 16 +- .../go-swagger/go-swagger/codescan/spec.go | 4 +- .../go-swagger/generator/bindata.go | 2 +- .../go-swagger/go-swagger/generator/config.go | 4 +- .../go-swagger/generator/formats.go | 2 + .../go-swagger/generator/genopts_nonwin.go | 2 - .../go-swagger/generator/language.go | 5 +- .../go-swagger/go-swagger/generator/media.go | 4 + .../go-swagger/go-swagger/generator/model.go | 79 +- .../go-swagger/generator/operation.go | 70 +- .../go-swagger/go-swagger/generator/shared.go | 59 +- .../go-swagger/go-swagger/generator/spec.go | 19 +- .../go-swagger/generator/structs.go | 15 + .../go-swagger/generator/support.go | 78 +- .../go-swagger/generator/template_repo.go | 167 +- .../generator/templates/cli/cli.gotmpl | 410 +++-- .../generator/templates/cli/main.gotmpl | 21 +- .../generator/templates/cli/modelcli.gotmpl | 11 +- .../generator/templates/cli/operation.gotmpl | 70 +- .../templates/cli/registerflag.gotmpl | 30 +- .../templates/cli/retrieveflag.gotmpl | 34 +- .../generator/templates/cli/schema.gotmpl | 50 +- .../generator/templates/client/client.gotmpl | 79 +- .../templates/client/response.gotmpl | 10 +- .../templates/contrib/stratoscale/README.md | 2 +- .../stratoscale/server/configureapi.gotmpl | 2 +- .../generator/templates/docstring.gotmpl | 6 +- .../generator/templates/markdown/docs.gotmpl | 28 +- .../templates/schemavalidator.gotmpl | 108 +- .../generator/templates/server/server.gotmpl | 2 +- .../templates/validation/customformat.gotmpl | 2 +- .../templates/validation/maximum.gotmpl | 12 +- .../templates/validation/minimum.gotmpl | 12 +- .../templates/validation/multipleOf.gotmpl | 12 +- .../templates/validation/primitive.gotmpl | 8 +- .../templates/validation/structfield.gotmpl | 10 +- .../go-swagger/go-swagger/generator/types.go | 9 +- .../go-swagger/go-swagger/scan/README.md | 3 - .../go-swagger/go-swagger/scan/classifier.go | 166 -- .../go-swagger/go-swagger/scan/doc.go | 89 - .../go-swagger/go-swagger/scan/enum.go | 84 - .../go-swagger/go-swagger/scan/meta.go | 246 --- .../go-swagger/go-swagger/scan/operations.go | 85 - .../go-swagger/go-swagger/scan/parameters.go | 515 ------ .../go-swagger/go-swagger/scan/path.go | 151 -- .../go-swagger/go-swagger/scan/responses.go | 453 ----- .../go-swagger/scan/route_params.go | 253 --- .../go-swagger/go-swagger/scan/routes.go | 146 -- .../go-swagger/go-swagger/scan/scanner.go | 974 ---------- .../go-swagger/go-swagger/scan/schema.go | 1358 -------------- .../go-swagger/go-swagger/scan/validators.go | 829 --------- .../github.com/gorilla/handlers/.editorconfig | 20 + vendor/github.com/gorilla/handlers/.gitignore | 2 + 
vendor/github.com/gorilla/handlers/LICENSE | 39 +- vendor/github.com/gorilla/handlers/Makefile | 34 + vendor/github.com/gorilla/handlers/README.md | 10 +- .../github.com/gorilla/handlers/canonical.go | 9 +- .../github.com/gorilla/handlers/compress.go | 8 +- vendor/github.com/gorilla/handlers/cors.go | 41 +- .../github.com/gorilla/handlers/handlers.go | 15 +- vendor/github.com/gorilla/handlers/logging.go | 32 +- .../gorilla/handlers/proxy_headers.go | 16 +- .../github.com/gorilla/handlers/recovery.go | 22 +- vendor/github.com/huandu/xstrings/convert.go | 73 +- vendor/github.com/huandu/xstrings/format.go | 28 +- .../github.com/huandu/xstrings/manipulate.go | 12 +- .../huandu/xstrings/stringbuilder.go | 3 +- .../huandu/xstrings/stringbuilder_go110.go | 3 +- .../github.com/huandu/xstrings/translate.go | 52 +- .../github.com/imdario/mergo/CONTRIBUTING.md | 112 ++ vendor/github.com/imdario/mergo/README.md | 53 +- vendor/github.com/imdario/mergo/SECURITY.md | 14 + vendor/github.com/imdario/mergo/map.go | 6 +- vendor/github.com/imdario/mergo/merge.go | 61 +- vendor/github.com/imdario/mergo/mergo.go | 15 +- .../github.com/shopspring/decimal/.gitignore | 3 + .../github.com/shopspring/decimal/.travis.yml | 10 +- .../shopspring/decimal/CHANGELOG.md | 32 +- .../github.com/shopspring/decimal/README.md | 2 +- .../github.com/shopspring/decimal/decimal.go | 473 ++++- .../github.com/shopspring/decimal/rounding.go | 65 +- vendor/golang.org/x/exp/slices/slices.go | 50 +- vendor/golang.org/x/sync/errgroup/errgroup.go | 135 ++ vendor/golang.org/x/sync/errgroup/go120.go | 13 + .../golang.org/x/sync/errgroup/pre_go120.go | 14 + .../x/tools/go/buildutil/allpackages.go | 195 -- .../x/tools/go/buildutil/fakecontext.go | 111 -- .../x/tools/go/buildutil/overlay.go | 101 -- .../golang.org/x/tools/go/buildutil/tags.go | 100 - .../golang.org/x/tools/go/buildutil/util.go | 209 --- .../golang.org/x/tools/go/internal/cgo/cgo.go | 219 --- .../x/tools/go/internal/cgo/cgo_pkgconfig.go | 42 - vendor/golang.org/x/tools/go/loader/doc.go | 202 --- vendor/golang.org/x/tools/go/loader/loader.go | 1066 ----------- vendor/golang.org/x/tools/go/loader/util.go | 123 -- vendor/modules.txt | 73 +- 251 files changed, 10841 insertions(+), 11896 deletions(-) create mode 100644 vendor/github.com/Masterminds/semver/v3/SECURITY.md delete mode 100644 vendor/github.com/Masterminds/semver/v3/fuzz.go delete mode 100644 vendor/github.com/felixge/httpsnoop/.travis.yml delete mode 100644 vendor/github.com/go-openapi/analysis/appveyor.yml create mode 100644 vendor/github.com/go-openapi/inflect/.gitignore create mode 100644 vendor/github.com/go-openapi/inflect/.golangci.yml delete mode 100644 vendor/github.com/go-openapi/inflect/.hgignore rename vendor/github.com/go-openapi/inflect/{LICENCE => LICENSE} (100%) delete mode 100644 vendor/github.com/go-openapi/inflect/README create mode 100644 vendor/github.com/go-openapi/inflect/README.md create mode 100644 vendor/github.com/go-openapi/jsonpointer/.golangci.yml create mode 100644 vendor/github.com/go-openapi/runtime/csv_options.go delete mode 100644 vendor/github.com/go-openapi/runtime/middleware/go18.go delete mode 100644 vendor/github.com/go-openapi/runtime/middleware/pre_go18.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/ui_options.go delete mode 100644 vendor/github.com/go-openapi/spec/appveyor.yml delete mode 100644 vendor/github.com/go-openapi/spec/bindata.go create mode 100644 vendor/github.com/go-openapi/spec/embed.go create mode 100644 
vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json create mode 100644 vendor/github.com/go-openapi/spec/schemas/v2/schema.json delete mode 100644 vendor/github.com/go-openapi/spec/url_go18.go create mode 100644 vendor/github.com/go-openapi/swag/BENCHMARK.md create mode 100644 vendor/github.com/go-openapi/swag/initialism_index.go delete mode 100644 vendor/github.com/go-openapi/swag/post_go18.go delete mode 100644 vendor/github.com/go-openapi/swag/post_go19.go delete mode 100644 vendor/github.com/go-openapi/swag/pre_go18.go delete mode 100644 vendor/github.com/go-openapi/swag/pre_go19.go create mode 100644 vendor/github.com/go-openapi/swag/string_bytes.go create mode 100644 vendor/github.com/go-openapi/validate/BENCHMARK.md delete mode 100644 vendor/github.com/go-openapi/validate/appveyor.yml create mode 100644 vendor/github.com/go-openapi/validate/pools.go create mode 100644 vendor/github.com/go-openapi/validate/pools_debug.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec_go111.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers_go118.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/README.md delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/classifier.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/doc.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/enum.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/meta.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/operations.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/parameters.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/path.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/responses.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/route_params.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/routes.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/scanner.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/schema.go delete mode 100644 vendor/github.com/go-swagger/go-swagger/scan/validators.go create mode 100644 vendor/github.com/gorilla/handlers/.editorconfig create mode 100644 vendor/github.com/gorilla/handlers/.gitignore create mode 100644 vendor/github.com/gorilla/handlers/Makefile create mode 100644 vendor/github.com/imdario/mergo/CONTRIBUTING.md create mode 100644 vendor/github.com/imdario/mergo/SECURITY.md create mode 100644 vendor/golang.org/x/sync/errgroup/errgroup.go create mode 100644 vendor/golang.org/x/sync/errgroup/go120.go create mode 100644 vendor/golang.org/x/sync/errgroup/pre_go120.go delete mode 100644 vendor/golang.org/x/tools/go/buildutil/allpackages.go delete mode 100644 vendor/golang.org/x/tools/go/buildutil/fakecontext.go delete mode 100644 vendor/golang.org/x/tools/go/buildutil/overlay.go delete mode 100644 vendor/golang.org/x/tools/go/buildutil/tags.go delete mode 100644 vendor/golang.org/x/tools/go/buildutil/util.go delete mode 100644 vendor/golang.org/x/tools/go/internal/cgo/cgo.go delete mode 100644 vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go delete mode 100644 vendor/golang.org/x/tools/go/loader/doc.go delete mode 100644 vendor/golang.org/x/tools/go/loader/loader.go delete mode 100644 vendor/golang.org/x/tools/go/loader/util.go diff --git a/.drone.yml b/.drone.yml index 97141971a..afcba553a 100644 --- a/.drone.yml +++ b/.drone.yml @@ -80,7 +80,7 @@ steps: - yarn --cwd 
./web/source build - name: snapshot - image: superseriousbusiness/gotosocial-drone-build:0.5.0 # https://github.com/superseriousbusiness/gotosocial-drone-build + image: superseriousbusiness/gotosocial-drone-build:0.6.0 # https://github.com/superseriousbusiness/gotosocial-drone-build volumes: - name: go-build-cache path: /root/.cache/go-build @@ -121,7 +121,7 @@ steps: - main - name: release - image: superseriousbusiness/gotosocial-drone-build:0.5.0 # https://github.com/superseriousbusiness/gotosocial-drone-build + image: superseriousbusiness/gotosocial-drone-build:0.6.0 # https://github.com/superseriousbusiness/gotosocial-drone-build volumes: - name: go-build-cache path: /root/.cache/go-build @@ -180,7 +180,7 @@ clone: steps: - name: mirror - image: superseriousbusiness/gotosocial-drone-build:0.5.0 + image: superseriousbusiness/gotosocial-drone-build:0.6.0 environment: ORIGIN_REPO: https://github.com/superseriousbusiness/gotosocial TARGET_REPO: https://codeberg.org/superseriousbusiness/gotosocial @@ -193,6 +193,6 @@ steps: --- kind: signature -hmac: f7ef1e0d3d4fe0a55d43ba0ab5ed6cb5f5c8bf00791464ce7b251a3cdbfd954a +hmac: 643cd740e2b7bcb39d7093d34f04863b5907efb72f2db8912337389df4ad9f0f ... diff --git a/go.mod b/go.mod index 1626d5d8a..d1a5c6fe5 100644 --- a/go.mod +++ b/go.mod @@ -34,7 +34,7 @@ require ( github.com/gin-contrib/sessions v1.0.0 github.com/gin-gonic/gin v1.9.1 github.com/go-playground/form/v4 v4.2.1 - github.com/go-swagger/go-swagger v0.30.5 + github.com/go-swagger/go-swagger v0.30.6-0.20240418033037-c46c303aaa02 github.com/google/uuid v1.6.0 github.com/gorilla/feeds v1.1.2 github.com/gorilla/websocket v1.5.1 @@ -89,7 +89,7 @@ require ( codeberg.org/gruf/go-mangler v1.3.0 // indirect codeberg.org/gruf/go-maps v1.0.3 // indirect github.com/Masterminds/goutils v1.1.1 // indirect - github.com/Masterminds/semver/v3 v3.2.0 // indirect + github.com/Masterminds/semver/v3 v3.2.1 // indirect github.com/Masterminds/sprig/v3 v3.2.3 // indirect github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect github.com/aymerick/douceur v0.2.0 // indirect @@ -111,7 +111,7 @@ require ( github.com/dsoprea/go-photoshop-info-format v0.0.0-20200610045659-121dd752914d // indirect github.com/dsoprea/go-utility/v2 v2.0.0-20200717064901-2fccff4aa15e // indirect github.com/dustin/go-humanize v1.0.1 // indirect - github.com/felixge/httpsnoop v1.0.3 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fsnotify/fsnotify v1.7.0 // indirect github.com/gabriel-vasile/mimetype v1.4.3 // indirect github.com/gin-contrib/sse v0.1.0 // indirect @@ -120,17 +120,17 @@ require ( github.com/go-jose/go-jose/v4 v4.0.1 // indirect github.com/go-logr/logr v1.4.1 // indirect github.com/go-logr/stdr v1.2.2 // indirect - github.com/go-openapi/analysis v0.21.4 // indirect - github.com/go-openapi/errors v0.20.4 // indirect - github.com/go-openapi/inflect v0.19.0 // indirect - github.com/go-openapi/jsonpointer v0.19.6 // indirect - github.com/go-openapi/jsonreference v0.20.2 // indirect - github.com/go-openapi/loads v0.21.2 // indirect - github.com/go-openapi/runtime v0.26.0 // indirect - github.com/go-openapi/spec v0.20.9 // indirect - github.com/go-openapi/strfmt v0.21.7 // indirect - github.com/go-openapi/swag v0.22.4 // indirect - github.com/go-openapi/validate v0.22.1 // indirect + github.com/go-openapi/analysis v0.23.0 // indirect + github.com/go-openapi/errors v0.22.0 // indirect + github.com/go-openapi/inflect v0.21.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // 
indirect + github.com/go-openapi/jsonreference v0.21.0 // indirect + github.com/go-openapi/loads v0.22.0 // indirect + github.com/go-openapi/runtime v0.28.0 // indirect + github.com/go-openapi/spec v0.21.0 // indirect + github.com/go-openapi/strfmt v0.23.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect + github.com/go-openapi/validate v0.24.0 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/validator/v10 v10.19.0 // indirect @@ -141,14 +141,14 @@ require ( github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 // indirect github.com/gorilla/context v1.1.2 // indirect github.com/gorilla/css v1.0.0 // indirect - github.com/gorilla/handlers v1.5.1 // indirect + github.com/gorilla/handlers v1.5.2 // indirect github.com/gorilla/securecookie v1.1.2 // indirect github.com/gorilla/sessions v1.2.2 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 // indirect github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/hashicorp/hcl v1.0.0 // indirect - github.com/huandu/xstrings v1.3.3 // indirect - github.com/imdario/mergo v0.3.12 // indirect + github.com/huandu/xstrings v1.4.0 // indirect + github.com/imdario/mergo v0.3.16 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect @@ -183,11 +183,11 @@ require ( github.com/prometheus/procfs v0.12.0 // indirect github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect - github.com/rogpeppe/go-internal v1.10.0 // indirect + github.com/rogpeppe/go-internal v1.12.0 // indirect github.com/rs/xid v1.5.0 // indirect github.com/sagikazarmark/locafero v0.4.0 // indirect github.com/sagikazarmark/slog-shim v0.1.0 // indirect - github.com/shopspring/decimal v1.2.0 // indirect + github.com/shopspring/decimal v1.3.1 // indirect github.com/sirupsen/logrus v1.9.3 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/spf13/afero v1.11.0 // indirect @@ -209,7 +209,7 @@ require ( go.opentelemetry.io/proto/otlp v1.1.0 // indirect go.uber.org/multierr v1.11.0 // indirect golang.org/x/arch v0.7.0 // indirect - golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 // indirect + golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 // indirect golang.org/x/mod v0.16.0 // indirect golang.org/x/sync v0.6.0 // indirect golang.org/x/sys v0.19.0 // indirect diff --git a/go.sum b/go.sum index d7cd934fd..01e8921df 100644 --- a/go.sum +++ b/go.sum @@ -85,12 +85,11 @@ github.com/KimMachineGun/automemlimit v0.6.0 h1:p/BXkH+K40Hax+PuWWPQ478hPjsp9h1C github.com/KimMachineGun/automemlimit v0.6.0/go.mod h1:T7xYht7B8r6AG/AqFcUdc7fzd2bIdBKmepfP2S1svPY= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g= github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= +github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= +github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA= github.com/Masterminds/sprig/v3 
v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM= -github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= -github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/abema/go-mp4 v1.2.0 h1:gi4X8xg/m179N/J15Fn5ugywN9vtI6PLk6iLldHGLAk= github.com/abema/go-mp4 v1.2.0/go.mod h1:vPl9t5ZK7K0x68jh12/+ECWBCXoWuIDtNgPtU2f04ws= github.com/ajg/form v1.5.1 h1:t9c7v8JUKu/XxOGBU0yjNpaMloxGEJhUkqFRq0ibGeU= @@ -98,7 +97,6 @@ github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY github.com/andybalholm/brotli v1.0.0/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs= github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= -github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= @@ -176,9 +174,8 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7 github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= -github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk= -github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= @@ -218,46 +215,28 @@ github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= -github.com/go-openapi/analysis v0.21.2/go.mod h1:HZwRk4RRisyG8vx2Oe6aqeSQcoxRp47Xkp3+K6q+LdY= -github.com/go-openapi/analysis v0.21.4 h1:ZDFLvSNxpDaomuCueM0BlSXxpANBlFYiBvr+GXrvIHc= -github.com/go-openapi/analysis v0.21.4/go.mod h1:4zQ35W4neeZTqh3ol0rv/O8JBbka9QyAgQRPp9y3pfo= -github.com/go-openapi/errors v0.19.8/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= -github.com/go-openapi/errors v0.19.9/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= -github.com/go-openapi/errors v0.20.2/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= -github.com/go-openapi/errors v0.20.4 h1:unTcVm6PispJsMECE3zWgvG4xTiKda1LIR5rCRWLG6M= -github.com/go-openapi/errors v0.20.4/go.mod h1:Z3FlZ4I8jEGxjUK+bugx3on2mIAk4txuAOhlsB1FSgk= 
-github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4= -github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4= -github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= -github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= -github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= -github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs= -github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns= -github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo= -github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE= -github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= -github.com/go-openapi/loads v0.21.1/go.mod h1:/DtAMXXneXFjbQMGEtbamCZb+4x7eGwkvZCvBmwUG+g= -github.com/go-openapi/loads v0.21.2 h1:r2a/xFIYeZ4Qd2TnGpWDIQNcP80dIaZgf704za8enro= -github.com/go-openapi/loads v0.21.2/go.mod h1:Jq58Os6SSGz0rzh62ptiu8Z31I+OTHqmULx5e/gJbNw= -github.com/go-openapi/runtime v0.26.0 h1:HYOFtG00FM1UvqrcxbEJg/SwvDRvYLQKGhw2zaQjTcc= -github.com/go-openapi/runtime v0.26.0/go.mod h1:QgRGeZwrUcSHdeh4Ka9Glvo0ug1LC5WyE+EV88plZrQ= -github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I= -github.com/go-openapi/spec v0.20.6/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= -github.com/go-openapi/spec v0.20.9 h1:xnlYNQAwKd2VQRRfwTEI0DcK+2cbuvI/0c7jx3gA8/8= -github.com/go-openapi/spec v0.20.9/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= -github.com/go-openapi/strfmt v0.21.0/go.mod h1:ZRQ409bWMj+SOgXofQAGTIo2Ebu72Gs+WaRADcS5iNg= -github.com/go-openapi/strfmt v0.21.1/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k= -github.com/go-openapi/strfmt v0.21.3/go.mod h1:k+RzNO0Da+k3FrrynSNN8F7n/peCmQQqbbXjtDfvmGg= -github.com/go-openapi/strfmt v0.21.7 h1:rspiXgNWgeUzhjo1YU01do6qsahtJNByjLVbPLNHb8k= -github.com/go-openapi/strfmt v0.21.7/go.mod h1:adeGTkxE44sPyLk0JV235VQAO/ZXUr8KAzYjclFs3ew= -github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= -github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= -github.com/go-openapi/swag v0.21.1/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= -github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= -github.com/go-openapi/swag v0.22.4 h1:QLMzNJnMGPRNDCbySlcj1x01tzU8/9LTTL9hZZZogBU= -github.com/go-openapi/swag v0.22.4/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= -github.com/go-openapi/validate v0.22.1 h1:G+c2ub6q47kfX1sOBLwIQwzBVt8qmOAARyo/9Fqs9NU= -github.com/go-openapi/validate v0.22.1/go.mod h1:rjnrwK57VJ7A8xqfpAOEKRH8yQSGUriMu5/zuPSQ1hg= +github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU= +github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo= +github.com/go-openapi/errors v0.22.0 h1:c4xY/OLxUBSTiepAg3j/MHuAv5mJhnf53LLMWFB+u/w= +github.com/go-openapi/errors v0.22.0/go.mod h1:J3DmZScxCDufmIMsdOuDHxJbdOGC0xtUynjIx092vXE= +github.com/go-openapi/inflect v0.21.0 h1:FoBjBTQEcbg2cJUWX6uwL9OyIW8eqc9k4KhN4lfbeYk= +github.com/go-openapi/inflect v0.21.0/go.mod h1:INezMuUu7SJQc2AyR3WO0DqqYUJSj8Kb4hBd7WtjlAw= +github.com/go-openapi/jsonpointer v0.21.0 
h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= +github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= +github.com/go-openapi/loads v0.22.0 h1:ECPGd4jX1U6NApCGG1We+uEozOAvXvJSF4nnwHZ8Aco= +github.com/go-openapi/loads v0.22.0/go.mod h1:yLsaTCS92mnSAZX5WWoxszLj0u+Ojl+Zs5Stn1oF+rs= +github.com/go-openapi/runtime v0.28.0 h1:gpPPmWSNGo214l6n8hzdXYhPuJcGtziTOgUpvsFWGIQ= +github.com/go-openapi/runtime v0.28.0/go.mod h1:QN7OzcS+XuYmkQLw05akXk0jRH/eZ3kb18+1KwW9gyc= +github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY= +github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= +github.com/go-openapi/strfmt v0.23.0 h1:nlUS6BCqcnAk0pyhi9Y+kdDVZdZMHfEKQiS4HaMgO/c= +github.com/go-openapi/strfmt v0.23.0/go.mod h1:NrtIpfKtWIygRkKVsxh7XQMDQW5HKQl6S5ik2elW+K4= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= +github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58= +github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= @@ -270,39 +249,12 @@ github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91 github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4= github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= github.com/go-session/session v3.1.2+incompatible/go.mod h1:8B3iivBQjrz/JtC68Np2T1yBBLxTan3mn/3OM0CyRt0= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-swagger/go-swagger v0.30.5 h1:SQ2+xSonWjjoEMOV5tcOnZJVlfyUfCBhGQGArS1b9+U= -github.com/go-swagger/go-swagger v0.30.5/go.mod h1:cWUhSyCNqV7J1wkkxfr5QmbcnCewetCdvEXqgPvbc/Q= -github.com/go-swagger/scan-repo-boundary v0.0.0-20180623220736-973b3573c013 h1:l9rI6sNaZgNC0LnF3MiE+qTmyBA/tZAg1rtyrGbUMK0= -github.com/go-swagger/scan-repo-boundary v0.0.0-20180623220736-973b3573c013/go.mod h1:b65mBPzqzZWxOZGxSWrqs4GInLIn+u99Q9q7p+GKni0= +github.com/go-swagger/go-swagger v0.30.6-0.20240418033037-c46c303aaa02 h1:J6YiT/eg3gAfKMdVCkWXe6khsO+nxa8W4URZ4AUqzbA= +github.com/go-swagger/go-swagger v0.30.6-0.20240418033037-c46c303aaa02/go.mod h1:i1/E+d8iPNReSE7y04FaVu5OPKB3il5cn+T1Egogg3I= github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/go-xmlfmt/xmlfmt v0.0.0-20211206191508-7fd73a941850 h1:PSPmmucxGiFBtbQcttHTUc4LQ3P09AW+ldO2qspyKdY= github.com/go-xmlfmt/xmlfmt v0.0.0-20211206191508-7fd73a941850/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= -github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= -github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= -github.com/gobuffalo/depgen v0.1.0/go.mod 
h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= -github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= -github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= -github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs= -github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= -github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= -github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk= -github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28= -github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo= -github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk= -github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw= -github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360= -github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg= -github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE= -github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8= -github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= -github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= -github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= -github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= -github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= -github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= -github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/godbus/dbus/v5 v5.0.4 h1:9349emZab16e7zQvpmsbtjc18ykshndd8y2PG3sgJbA= @@ -342,7 +294,6 @@ github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:W github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -352,7 +303,6 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk= @@ -386,8 +336,8 @@ github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= github.com/gorilla/feeds v1.1.2 h1:pxzZ5PD3RJdhFH2FsJJ4x6PqMqbgFk1+Vez4XWBW8Iw= github.com/gorilla/feeds v1.1.2/go.mod h1:WMib8uJP3BbY+X8Szd1rA5Pzhdfh+HCCAYT2z7Fza6Y= -github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4= -github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q= +github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE= +github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w= github.com/gorilla/securecookie v1.1.2 h1:YCIWL56dvtr73r6715mJs5ZvhtnY73hBvEF8kXD8ePA= github.com/gorilla/securecookie v1.1.2/go.mod h1:NfCASbcHqRSY+3a8tlWJwsQap2VX5pwzwo4h3eOamfo= github.com/gorilla/sessions v1.2.2 h1:lqzMYz6bOfvn2WriPUjNByzeXIlVzURcPmgMczkmTjY= @@ -406,15 +356,15 @@ github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyf github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4= github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU= +github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= -github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= +github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/imkira/go-interpol v1.1.0 h1:KIiKr0VSG2CUW1hl1jpiyuzuJeKUUpC8iM1AIE7N1Vk= github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= @@ -430,7 +380,6 @@ github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LF github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= -github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= 
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= @@ -440,12 +389,9 @@ github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/X github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= -github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= -github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.10.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.10.10/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= @@ -453,10 +399,7 @@ github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa02 github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM= github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= @@ -468,13 +411,8 @@ github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= -github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= -github.com/markbates/safe v1.0.1/go.mod 
h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= @@ -498,8 +436,6 @@ github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa1 github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4= github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE= -github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= @@ -510,12 +446,10 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/moul/http2curl v1.0.0 h1:dRMWoAtb+ePxMlLkrCbAqh4TlPHXvoGUSQ323/9Zahs= github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4= github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= @@ -530,12 +464,9 @@ github.com/orcaman/writerseeker v0.0.0-20200621085525-1d3f536ff85e h1:s2RNOM/IGd github.com/orcaman/writerseeker v0.0.0-20200621085525-1d3f536ff85e/go.mod h1:nBdnFKj15wFbf94Rwfq4m30eAcyY9V/IyKAGQFtqkW0= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y= -github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= github.com/pelletier/go-toml/v2 v2.2.0 h1:QLgLl2yMN7N+ruc31VynXs1vhMZa7CeHHejIeBAsoHo= github.com/pelletier/go-toml/v2 v2.2.0/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod 
h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= @@ -556,12 +487,10 @@ github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b h1:aUNXCGgukb4gtY github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b/go.mod h1:wTPjTepVu7uJBYgZ0SdWHQlIas582j6cn2jgk4DDdlg= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= -github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= -github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= -github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= +github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= +github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= @@ -572,11 +501,9 @@ github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWR github.com/sclevine/agouti v3.0.0+incompatible/go.mod h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= +github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= @@ -590,16 +517,13 @@ github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNo github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= -github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= -github.com/spf13/pflag v1.0.3/go.mod 
h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ= github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= @@ -691,11 +615,6 @@ github.com/wagslane/go-password-validator v0.3.0 h1:vfxOPzGHkz5S146HDpavl0cw1DSV github.com/wagslane/go-password-validator v0.3.0/go.mod h1:TI1XJ6T5fRdRnHqHt14pvy1tNVnrwe7m3/f1f2fDphQ= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= -github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= -github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= -github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= -github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= -github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= @@ -704,7 +623,6 @@ github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17 github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0 h1:6fRhSjgLCkTD3JnJxvaJ4Sj+TYblw757bqYgZaOq5ZY= github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yudai/gojsondiff v1.0.0 h1:27cbfqXLVEJ1o8I6v3y9lg8Ydm53EKqHXAOMxEGlCOA= github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82 h1:BHyfKlQyqbsFN5p3IfnEUduWvb9is428/nNb5L3U01M= @@ -718,9 +636,6 @@ github.com/yuin/goldmark v1.7.1 h1:3bajkSilaCbjdKVsKdZjZCLBNPL9pYzrCakKaf4U49U= github.com/yuin/goldmark v1.7.1/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= gitlab.com/NyaaaWhatsUpDoc/sqlite v1.29.8-concurrency-workaround h1:ESobxED9bfE0nOQP/WPv9+tMR8oZoDIWRKlNK2Vs4Ms= gitlab.com/NyaaaWhatsUpDoc/sqlite v1.29.8-concurrency-workaround/go.mod h1:lQPm27iqa4UNZpmr4Aor0MH0HkCLbt1huYDfWylLZFk= -go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= -go.mongodb.org/mongo-driver v1.7.5/go.mod h1:VXEWRZ6URJIkUq2SCAyapmhH0ZLRBP+FT4xhp5Zvxng= -go.mongodb.org/mongo-driver v1.10.0/go.mod h1:wsihk0Kdgv8Kqu1Anit4sfK+22vSFbUrAVEYRhCXrA8= go.mongodb.org/mongo-driver v1.14.0 
h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= @@ -757,16 +672,12 @@ go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN8 golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc= golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= @@ -780,8 +691,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 h1:hNQpMuAJe5CtcUqCXaWga3FHu+kQvCqcsoVaQgSV60o= -golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08= +golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ= +golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= @@ -838,8 +749,6 @@ golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81R golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net 
v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w= @@ -856,27 +765,20 @@ golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -903,8 +805,6 @@ golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys 
v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -924,8 +824,6 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= @@ -940,13 +838,9 @@ golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3 golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= @@ -1071,7 +965,6 @@ google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHh gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= @@ -1091,9 +984,7 @@ gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/vendor/github.com/Masterminds/semver/v3/.golangci.yml b/vendor/github.com/Masterminds/semver/v3/.golangci.yml index c87d1c4b9..fbc633259 100644 --- a/vendor/github.com/Masterminds/semver/v3/.golangci.yml +++ b/vendor/github.com/Masterminds/semver/v3/.golangci.yml @@ -5,12 +5,9 @@ linters: disable-all: true enable: - misspell - - structcheck - govet - staticcheck - - deadcode - errcheck - - varcheck - unparam - ineffassign - nakedret diff --git a/vendor/github.com/Masterminds/semver/v3/Makefile b/vendor/github.com/Masterminds/semver/v3/Makefile index eac19178f..0e7b5c713 100644 --- a/vendor/github.com/Masterminds/semver/v3/Makefile +++ b/vendor/github.com/Masterminds/semver/v3/Makefile @@ -1,7 +1,5 @@ GOPATH=$(shell go env GOPATH) GOLANGCI_LINT=$(GOPATH)/bin/golangci-lint -GOFUZZBUILD = $(GOPATH)/bin/go-fuzz-build -GOFUZZ = $(GOPATH)/bin/go-fuzz .PHONY: lint lint: $(GOLANGCI_LINT) @@ -19,19 +17,14 @@ test-cover: GO111MODULE=on go test -cover . .PHONY: fuzz -fuzz: $(GOFUZZBUILD) $(GOFUZZ) - @echo "==> Fuzz testing" - $(GOFUZZBUILD) - $(GOFUZZ) -workdir=_fuzz +fuzz: + @echo "==> Running Fuzz Tests" + go test -fuzz=FuzzNewVersion -fuzztime=15s . + go test -fuzz=FuzzStrictNewVersion -fuzztime=15s . + go test -fuzz=FuzzNewConstraint -fuzztime=15s . $(GOLANGCI_LINT): # Install golangci-lint. 
The configuration for it is in the .golangci.yml # file in the root of the repository echo ${GOPATH} curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(GOPATH)/bin v1.17.1 - -$(GOFUZZBUILD): - cd / && go get -u github.com/dvyukov/go-fuzz/go-fuzz-build - -$(GOFUZZ): - cd / && go get -u github.com/dvyukov/go-fuzz/go-fuzz github.com/dvyukov/go-fuzz/go-fuzz-dep \ No newline at end of file diff --git a/vendor/github.com/Masterminds/semver/v3/README.md b/vendor/github.com/Masterminds/semver/v3/README.md index d8f54dcbd..eab8cac3b 100644 --- a/vendor/github.com/Masterminds/semver/v3/README.md +++ b/vendor/github.com/Masterminds/semver/v3/README.md @@ -18,18 +18,20 @@ If you are looking for a command line tool for version comparisons please see ## Package Versions +Note, import `github.com/github.com/Masterminds/semver/v3` to use the latest version. + There are three major versions fo the `semver` package. -* 3.x.x is the new stable and active version. This version is focused on constraint +* 3.x.x is the stable and active version. This version is focused on constraint compatibility for range handling in other tools from other languages. It has a similar API to the v1 releases. The development of this version is on the master branch. The documentation for this version is below. * 2.x was developed primarily for [dep](https://github.com/golang/dep). There are no tagged releases and the development was performed by [@sdboyer](https://github.com/sdboyer). There are API breaking changes from v1. This version lives on the [2.x branch](https://github.com/Masterminds/semver/tree/2.x). -* 1.x.x is the most widely used version with numerous tagged releases. This is the - previous stable and is still maintained for bug fixes. The development, to fix - bugs, occurs on the release-1 branch. You can read the documentation [here](https://github.com/Masterminds/semver/blob/release-1/README.md). +* 1.x.x is the original release. It is no longer maintained. You should use the + v3 release instead. You can read the documentation for the 1.x.x release + [here](https://github.com/Masterminds/semver/blob/release-1/README.md). ## Parsing Semantic Versions @@ -242,3 +244,15 @@ for _, m := range msgs { If you find an issue or want to contribute please file an [issue](https://github.com/Masterminds/semver/issues) or [create a pull request](https://github.com/Masterminds/semver/pulls). + +## Security + +Security is an important consideration for this project. The project currently +uses the following tools to help discover security issues: + +* [CodeQL](https://github.com/Masterminds/semver) +* [gosec](https://github.com/securego/gosec) +* Daily Fuzz testing + +If you believe you have found a security vulnerability you can privately disclose +it through the [GitHub security page](https://github.com/Masterminds/semver/security). diff --git a/vendor/github.com/Masterminds/semver/v3/SECURITY.md b/vendor/github.com/Masterminds/semver/v3/SECURITY.md new file mode 100644 index 000000000..a30a66b1f --- /dev/null +++ b/vendor/github.com/Masterminds/semver/v3/SECURITY.md @@ -0,0 +1,19 @@ +# Security Policy + +## Supported Versions + +The following versions of semver are currently supported: + +| Version | Supported | +| ------- | ------------------ | +| 3.x | :white_check_mark: | +| 2.x | :x: | +| 1.x | :x: | + +Fixes are only released for the latest minor version in the form of a patch release. 
+ +## Reporting a Vulnerability + +You can privately disclose a vulnerability through GitHubs +[private vulnerability reporting](https://github.com/Masterminds/semver/security/advisories) +mechanism. diff --git a/vendor/github.com/Masterminds/semver/v3/constraints.go b/vendor/github.com/Masterminds/semver/v3/constraints.go index 203072e46..8461c7ed9 100644 --- a/vendor/github.com/Masterminds/semver/v3/constraints.go +++ b/vendor/github.com/Masterminds/semver/v3/constraints.go @@ -586,7 +586,7 @@ func rewriteRange(i string) string { } o := i for _, v := range m { - t := fmt.Sprintf(">= %s, <= %s", v[1], v[11]) + t := fmt.Sprintf(">= %s, <= %s ", v[1], v[11]) o = strings.Replace(o, v[0], t, 1) } diff --git a/vendor/github.com/Masterminds/semver/v3/fuzz.go b/vendor/github.com/Masterminds/semver/v3/fuzz.go deleted file mode 100644 index a242ad705..000000000 --- a/vendor/github.com/Masterminds/semver/v3/fuzz.go +++ /dev/null @@ -1,22 +0,0 @@ -// +build gofuzz - -package semver - -func Fuzz(data []byte) int { - d := string(data) - - // Test NewVersion - _, _ = NewVersion(d) - - // Test StrictNewVersion - _, _ = StrictNewVersion(d) - - // Test NewConstraint - _, _ = NewConstraint(d) - - // The return value should be 0 normally, 1 if the priority in future tests - // should be increased, and -1 if future tests should skip passing in that - // data. We do not have a reason to change priority so 0 is always returned. - // There are example tests that do this. - return 0 -} diff --git a/vendor/github.com/felixge/httpsnoop/.travis.yml b/vendor/github.com/felixge/httpsnoop/.travis.yml deleted file mode 100644 index bfc421200..000000000 --- a/vendor/github.com/felixge/httpsnoop/.travis.yml +++ /dev/null @@ -1,6 +0,0 @@ -language: go - -go: - - 1.6 - - 1.7 - - 1.8 diff --git a/vendor/github.com/felixge/httpsnoop/Makefile b/vendor/github.com/felixge/httpsnoop/Makefile index 2d84889ae..4e12afdd9 100644 --- a/vendor/github.com/felixge/httpsnoop/Makefile +++ b/vendor/github.com/felixge/httpsnoop/Makefile @@ -1,7 +1,7 @@ .PHONY: ci generate clean ci: clean generate - go test -v ./... + go test -race -v ./... generate: go generate . diff --git a/vendor/github.com/felixge/httpsnoop/README.md b/vendor/github.com/felixge/httpsnoop/README.md index ddcecd13e..cf6b42f3d 100644 --- a/vendor/github.com/felixge/httpsnoop/README.md +++ b/vendor/github.com/felixge/httpsnoop/README.md @@ -7,8 +7,8 @@ http.Handlers. Doing this requires non-trivial wrapping of the http.ResponseWriter interface, which is also exposed for users interested in a more low-level API. 
-[![GoDoc](https://godoc.org/github.com/felixge/httpsnoop?status.svg)](https://godoc.org/github.com/felixge/httpsnoop) -[![Build Status](https://travis-ci.org/felixge/httpsnoop.svg?branch=master)](https://travis-ci.org/felixge/httpsnoop) +[![Go Reference](https://pkg.go.dev/badge/github.com/felixge/httpsnoop.svg)](https://pkg.go.dev/github.com/felixge/httpsnoop) +[![Build Status](https://github.com/felixge/httpsnoop/actions/workflows/main.yaml/badge.svg)](https://github.com/felixge/httpsnoop/actions/workflows/main.yaml) ## Usage Example diff --git a/vendor/github.com/felixge/httpsnoop/capture_metrics.go b/vendor/github.com/felixge/httpsnoop/capture_metrics.go index b77cc7c00..bec7b71b3 100644 --- a/vendor/github.com/felixge/httpsnoop/capture_metrics.go +++ b/vendor/github.com/felixge/httpsnoop/capture_metrics.go @@ -52,7 +52,7 @@ func (m *Metrics) CaptureMetrics(w http.ResponseWriter, fn func(http.ResponseWri return func(code int) { next(code) - if !headerWritten { + if !(code >= 100 && code <= 199) && !headerWritten { m.Code = code headerWritten = true } diff --git a/vendor/github.com/felixge/httpsnoop/wrap_generated_gteq_1.8.go b/vendor/github.com/felixge/httpsnoop/wrap_generated_gteq_1.8.go index 31cbdfb8e..101cedde6 100644 --- a/vendor/github.com/felixge/httpsnoop/wrap_generated_gteq_1.8.go +++ b/vendor/github.com/felixge/httpsnoop/wrap_generated_gteq_1.8.go @@ -1,5 +1,5 @@ // +build go1.8 -// Code generated by "httpsnoop/codegen"; DO NOT EDIT +// Code generated by "httpsnoop/codegen"; DO NOT EDIT. package httpsnoop diff --git a/vendor/github.com/felixge/httpsnoop/wrap_generated_lt_1.8.go b/vendor/github.com/felixge/httpsnoop/wrap_generated_lt_1.8.go index ab99c07c7..e0951df15 100644 --- a/vendor/github.com/felixge/httpsnoop/wrap_generated_lt_1.8.go +++ b/vendor/github.com/felixge/httpsnoop/wrap_generated_lt_1.8.go @@ -1,5 +1,5 @@ // +build !go1.8 -// Code generated by "httpsnoop/codegen"; DO NOT EDIT +// Code generated by "httpsnoop/codegen"; DO NOT EDIT. package httpsnoop diff --git a/vendor/github.com/go-openapi/analysis/.golangci.yml b/vendor/github.com/go-openapi/analysis/.golangci.yml index e24a6c14e..22f8d21cc 100644 --- a/vendor/github.com/go-openapi/analysis/.golangci.yml +++ b/vendor/github.com/go-openapi/analysis/.golangci.yml @@ -4,53 +4,58 @@ linters-settings: golint: min-confidence: 0 gocyclo: - min-complexity: 40 - gocognit: - min-complexity: 40 + min-complexity: 45 maligned: suggest-new: true dupl: - threshold: 150 + threshold: 200 goconst: min-len: 2 - min-occurrences: 4 + min-occurrences: 3 linters: enable-all: true disable: - maligned + - unparam - lll - - gochecknoglobals - gochecknoinits - # scopelint is useful, but also reports false positives - # that unfortunately can't be disabled. So we disable the - # linter rather than changing code that works. 
- # see: https://github.com/kyoh86/scopelint/issues/4 - - scopelint + - gochecknoglobals + - funlen - godox - gocognit - #- whitespace + - whitespace - wsl - - funlen - - testpackage - wrapcheck - #- nlreturn + - testpackage + - nlreturn - gomnd - - goerr113 - exhaustivestruct - #- errorlint - #- nestif - - gofumpt + - goerr113 + - errorlint + - nestif - godot - - gci - - dogsled + - gofumpt - paralleltest - tparallel - thelper - ifshort - - forbidigo - - cyclop - - varnamelen - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint - nosnakecase diff --git a/vendor/github.com/go-openapi/analysis/README.md b/vendor/github.com/go-openapi/analysis/README.md index aad6da10f..e005d4b37 100644 --- a/vendor/github.com/go-openapi/analysis/README.md +++ b/vendor/github.com/go-openapi/analysis/README.md @@ -1,8 +1,5 @@ -# OpenAPI initiative analysis +# OpenAPI analysis [![Build Status](https://github.com/go-openapi/analysis/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/analysis/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/analysis/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/analysis) -[![Build Status](https://travis-ci.org/go-openapi/analysis.svg?branch=master)](https://travis-ci.org/go-openapi/analysis) -[![Build status](https://ci.appveyor.com/api/projects/status/x377t5o9ennm847o/branch/master?svg=true)](https://ci.appveyor.com/project/casualjim/go-openapi/analysis/branch/master) -[![codecov](https://codecov.io/gh/go-openapi/analysis/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/analysis) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) [![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE) [![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/analysis.svg)](https://pkg.go.dev/github.com/go-openapi/analysis) @@ -13,12 +10,12 @@ A foundational library to analyze an OAI specification document for easier reaso ## What's inside? -* A analyzer providing methods to walk the functional content of a specification +* An analyzer providing methods to walk the functional content of a specification * A spec flattener producing a self-contained document bundle, while preserving `$ref`s * A spec merger ("mixin") to merge several spec documents into a primary spec * A spec "fixer" ensuring that response descriptions are non empty -[Documentation](https://godoc.org/github.com/go-openapi/analysis) +[Documentation](https://pkg.go.dev/github.com/go-openapi/analysis) ## FAQ @@ -28,4 +25,3 @@ A foundational library to analyze an OAI specification document for easier reaso > This package currently only supports OpenAPI 2.0 (aka Swagger 2.0). > There is no plan to make it evolve toward supporting OpenAPI 3.x. > This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story. 
-> diff --git a/vendor/github.com/go-openapi/analysis/appveyor.yml b/vendor/github.com/go-openapi/analysis/appveyor.yml deleted file mode 100644 index c2f6fd733..000000000 --- a/vendor/github.com/go-openapi/analysis/appveyor.yml +++ /dev/null @@ -1,32 +0,0 @@ -version: "0.1.{build}" - -clone_folder: C:\go-openapi\analysis -shallow_clone: true # for startup speed -pull_requests: - do_not_increment_build_number: true - -#skip_tags: true -#skip_branch_with_pr: true - -# appveyor.yml -build: off - -environment: - GOPATH: c:\gopath - -stack: go 1.16 - -test_script: - - go test -v -timeout 20m ./... - -deploy: off - -notifications: - - provider: Slack - incoming_webhook: https://hooks.slack.com/services/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 - auth_token: - secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4= - channel: bots - on_build_success: false - on_build_failure: true - on_build_status_changed: true diff --git a/vendor/github.com/go-openapi/analysis/doc.go b/vendor/github.com/go-openapi/analysis/doc.go index d5294c095..e8d9f9b13 100644 --- a/vendor/github.com/go-openapi/analysis/doc.go +++ b/vendor/github.com/go-openapi/analysis/doc.go @@ -16,27 +16,27 @@ Package analysis provides methods to work with a Swagger specification document from package go-openapi/spec. -Analyzing a specification +## Analyzing a specification An analysed specification object (type Spec) provides methods to work with swagger definition. -Flattening or expanding a specification +## Flattening or expanding a specification Flattening a specification bundles all remote $ref in the main spec document. Depending on flattening options, additional preprocessing may take place: - full flattening: replacing all inline complex constructs by a named entry in #/definitions - expand: replace all $ref's in the document by their expanded content -Merging several specifications +## Merging several specifications Mixin several specifications merges all Swagger constructs, and warns about found conflicts. -Fixing a specification +## Fixing a specification Unmarshalling a specification with golang json unmarshalling may lead to some unwanted result on present but empty fields. -Analyzing a Swagger schema +## Analyzing a Swagger schema Swagger schemas are analyzed to determine their complexity and qualify their content. */ diff --git a/vendor/github.com/go-openapi/analysis/flatten.go b/vendor/github.com/go-openapi/analysis/flatten.go index 0576220fb..ebedcc9df 100644 --- a/vendor/github.com/go-openapi/analysis/flatten.go +++ b/vendor/github.com/go-openapi/analysis/flatten.go @@ -62,28 +62,26 @@ func newContext() *context { // // There is a minimal and a full flattening mode. 
// -// // Minimally flattening a spec means: -// - Expanding parameters, responses, path items, parameter items and header items (references to schemas are left -// unscathed) -// - Importing external (http, file) references so they become internal to the document -// - Moving every JSON pointer to a $ref to a named definition (i.e. the reworked spec does not contain pointers -// like "$ref": "#/definitions/myObject/allOfs/1") +// - Expanding parameters, responses, path items, parameter items and header items (references to schemas are left +// unscathed) +// - Importing external (http, file) references so they become internal to the document +// - Moving every JSON pointer to a $ref to a named definition (i.e. the reworked spec does not contain pointers +// like "$ref": "#/definitions/myObject/allOfs/1") // // A minimally flattened spec thus guarantees the following properties: -// - all $refs point to a local definition (i.e. '#/definitions/...') -// - definitions are unique +// - all $refs point to a local definition (i.e. '#/definitions/...') +// - definitions are unique // // NOTE: arbitrary JSON pointers (other than $refs to top level definitions) are rewritten as definitions if they // represent a complex schema or express commonality in the spec. // Otherwise, they are simply expanded. // Self-referencing JSON pointers cannot resolve to a type and trigger an error. // -// // Minimal flattening is necessary and sufficient for codegen rendering using go-swagger. // // Fully flattening a spec means: -// - Moving every complex inline schema to be a definition with an auto-generated name in a depth-first fashion. +// - Moving every complex inline schema to be a definition with an auto-generated name in a depth-first fashion. // // By complex, we mean every JSON object with some properties. // Arrays, when they do not define a tuple, @@ -93,22 +91,21 @@ func newContext() *context { // have been created. // // Available flattening options: -// - Minimal: stops flattening after minimal $ref processing, leaving schema constructs untouched -// - Expand: expand all $ref's in the document (inoperant if Minimal set to true) -// - Verbose: croaks about name conflicts detected -// - RemoveUnused: removes unused parameters, responses and definitions after expansion/flattening +// - Minimal: stops flattening after minimal $ref processing, leaving schema constructs untouched +// - Expand: expand all $ref's in the document (inoperant if Minimal set to true) +// - Verbose: croaks about name conflicts detected +// - RemoveUnused: removes unused parameters, responses and definitions after expansion/flattening // // NOTE: expansion removes all $ref save circular $ref, which remain in place // // TODO: additional options -// - ProgagateNameExtensions: ensure that created entries properly follow naming rules when their parent have set a -// x-go-name extension -// - LiftAllOfs: -// - limit the flattening of allOf members when simple objects -// - merge allOf with validation only -// - merge allOf with extensions only -// - ... -// +// - ProgagateNameExtensions: ensure that created entries properly follow naming rules when their parent have set a +// x-go-name extension +// - LiftAllOfs: +// - limit the flattening of allOf members when simple objects +// - merge allOf with validation only +// - merge allOf with extensions only +// - ... 
func Flatten(opts FlattenOpts) error { debugLog("FlattenOpts: %#v", opts) @@ -270,6 +267,12 @@ func nameInlinedSchemas(opts *FlattenOpts) error { } func removeUnused(opts *FlattenOpts) { + for removeUnusedSinglePass(opts) { + // continue until no unused definition remains + } +} + +func removeUnusedSinglePass(opts *FlattenOpts) (hasRemoved bool) { expected := make(map[string]struct{}) for k := range opts.Swagger().Definitions { expected[path.Join(definitionsPath, jsonpointer.Escape(k))] = struct{}{} @@ -280,6 +283,7 @@ func removeUnused(opts *FlattenOpts) { } for k := range expected { + hasRemoved = true debugLog("removing unused definition %s", path.Base(k)) if opts.Verbose { log.Printf("info: removing unused definition: %s", path.Base(k)) @@ -288,6 +292,8 @@ func removeUnused(opts *FlattenOpts) { } opts.Spec.reload() // re-analyze + + return hasRemoved } func importKnownRef(entry sortref.RefRevIdx, refStr, newName string, opts *FlattenOpts) error { @@ -334,7 +340,7 @@ func importNewRef(entry sortref.RefRevIdx, refStr string, opts *FlattenOpts) err } // generate a unique name - isOAIGen means that a naming conflict was resolved by changing the name - newName, isOAIGen = uniqifyName(opts.Swagger().Definitions, nameFromRef(entry.Ref)) + newName, isOAIGen = uniqifyName(opts.Swagger().Definitions, nameFromRef(entry.Ref, opts)) debugLog("new name for [%s]: %s - with name conflict:%t", strings.Join(entry.Keys, ", "), newName, isOAIGen) opts.flattenContext.resolved[refStr] = newName @@ -488,9 +494,9 @@ func stripPointersAndOAIGen(opts *FlattenOpts) error { // stripOAIGen strips the spec from unnecessary OAIGen constructs, initially created to dedupe flattened definitions. // // A dedupe is deemed unnecessary whenever: -// - the only conflict is with its (single) parent: OAIGen is merged into its parent (reinlining) -// - there is a conflict with multiple parents: merge OAIGen in first parent, the rewrite other parents to point to -// the first parent. +// - the only conflict is with its (single) parent: OAIGen is merged into its parent (reinlining) +// - there is a conflict with multiple parents: merge OAIGen in first parent, the rewrite other parents to point to +// the first parent. // // This function returns true whenever it re-inlined a complex schema, so the caller may chose to iterate // pointer and name resolution again. 
@@ -652,6 +658,7 @@ func namePointers(opts *FlattenOpts) error { refsToReplace := make(map[string]SchemaRef, len(opts.Spec.references.schemas)) for k, ref := range opts.Spec.references.allRefs { + debugLog("name pointers: %q => %#v", k, ref) if path.Dir(ref.String()) == definitionsPath { // this a ref to a top-level definition: ok continue @@ -769,6 +776,10 @@ func flattenAnonPointer(key string, v SchemaRef, refsToReplace map[string]Schema // identifying edge case when the namer did nothing because we point to a non-schema object // no definition is created and we expand the $ref for all callers + debugLog("decide what to do with the schema pointed to: asch.IsSimpleSchema=%t, len(callers)=%d, parts.IsSharedParam=%t, parts.IsSharedResponse=%t", + asch.IsSimpleSchema, len(callers), parts.IsSharedParam(), parts.IsSharedResponse(), + ) + if (!asch.IsSimpleSchema || len(callers) > 1) && !parts.IsSharedParam() && !parts.IsSharedResponse() { debugLog("replace JSON pointer at [%s] by definition: %s", key, v.Ref.String()) if err := namer.Name(v.Ref.String(), v.Schema, asch); err != nil { @@ -791,6 +802,7 @@ func flattenAnonPointer(key string, v SchemaRef, refsToReplace map[string]Schema return nil } + // everything that is a simple schema and not factorizable is expanded debugLog("expand JSON pointer for key=%s", key) if err := replace.UpdateRefWithSchema(opts.Swagger(), key, v.Schema); err != nil { diff --git a/vendor/github.com/go-openapi/analysis/flatten_name.go b/vendor/github.com/go-openapi/analysis/flatten_name.go index 3ad2ccfbf..c7d7938eb 100644 --- a/vendor/github.com/go-openapi/analysis/flatten_name.go +++ b/vendor/github.com/go-openapi/analysis/flatten_name.go @@ -33,12 +33,14 @@ func (isn *InlineSchemaNamer) Name(key string, schema *spec.Schema, aschema *Ana } // create unique name - newName, isOAIGen := uniqifyName(isn.Spec.Definitions, swag.ToJSONName(name)) + mangle := mangler(isn.opts) + newName, isOAIGen := uniqifyName(isn.Spec.Definitions, mangle(name)) // clone schema sch := schutils.Clone(schema) // replace values on schema + debugLog("rewriting schema to ref: key=%s with new name: %s", key, newName) if err := replace.RewriteSchemaToRef(isn.Spec, key, spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil { return fmt.Errorf("error while creating definition %q from inline schema: %w", newName, err) @@ -149,13 +151,15 @@ func namesFromKey(parts sortref.SplitKey, aschema *AnalyzedSchema, operations ma startIndex int ) - if parts.IsOperation() { + switch { + case parts.IsOperation(): baseNames, startIndex = namesForOperation(parts, operations) - } - - // definitions - if parts.IsDefinition() { + case parts.IsDefinition(): baseNames, startIndex = namesForDefinition(parts) + default: + // this a non-standard pointer: build a name by concatenating its parts + baseNames = [][]string{parts} + startIndex = len(baseNames) + 1 } result := make([]string, 0, len(baseNames)) @@ -169,6 +173,7 @@ func namesFromKey(parts sortref.SplitKey, aschema *AnalyzedSchema, operations ma } sort.Strings(result) + debugLog("names from parts: %v => %v", parts, result) return result } @@ -256,10 +261,20 @@ func partAdder(aschema *AnalyzedSchema) sortref.PartAdder { } } -func nameFromRef(ref spec.Ref) string { +func mangler(o *FlattenOpts) func(string) string { + if o.KeepNames { + return func(in string) string { return in } + } + + return swag.ToJSONName +} + +func nameFromRef(ref spec.Ref, o *FlattenOpts) string { + mangle := mangler(o) + u := ref.GetURL() if u.Fragment != "" { - return 
swag.ToJSONName(path.Base(u.Fragment)) + return mangle(path.Base(u.Fragment)) } if u.Path != "" { @@ -267,19 +282,19 @@ func nameFromRef(ref spec.Ref) string { if bn != "" && bn != "/" { ext := path.Ext(bn) if ext != "" { - return swag.ToJSONName(bn[:len(bn)-len(ext)]) + return mangle(bn[:len(bn)-len(ext)]) } - return swag.ToJSONName(bn) + return mangle(bn) } } - return swag.ToJSONName(strings.ReplaceAll(u.Host, ".", " ")) + return mangle(strings.ReplaceAll(u.Host, ".", " ")) } // GenLocation indicates from which section of the specification (models or operations) a definition has been created. // -// This is reflected in the output spec with a "x-go-gen-location" extension. At the moment, this is is provided +// This is reflected in the output spec with a "x-go-gen-location" extension. At the moment, this is provided // for information only. func GenLocation(parts sortref.SplitKey) string { switch { diff --git a/vendor/github.com/go-openapi/analysis/flatten_options.go b/vendor/github.com/go-openapi/analysis/flatten_options.go index c5bb97b0a..c943fe1e8 100644 --- a/vendor/github.com/go-openapi/analysis/flatten_options.go +++ b/vendor/github.com/go-openapi/analysis/flatten_options.go @@ -26,6 +26,7 @@ type FlattenOpts struct { Verbose bool // enable some reporting on possible name conflicts detected RemoveUnused bool // When true, remove unused parameters, responses and definitions after expansion/flattening ContinueOnError bool // Continue when spec expansion issues are found + KeepNames bool // Do not attempt to jsonify names from references when flattening /* Extra keys */ _ struct{} // require keys diff --git a/vendor/github.com/go-openapi/analysis/internal/debug/debug.go b/vendor/github.com/go-openapi/analysis/internal/debug/debug.go index ec0fec022..39f55a97b 100644 --- a/vendor/github.com/go-openapi/analysis/internal/debug/debug.go +++ b/vendor/github.com/go-openapi/analysis/internal/debug/debug.go @@ -29,7 +29,7 @@ // GetLogger provides a prefix debug logger func GetLogger(prefix string, debug bool) func(string, ...interface{}) { if debug { - logger := log.New(output, fmt.Sprintf("%s:", prefix), log.LstdFlags) + logger := log.New(output, prefix+":", log.LstdFlags) return func(msg string, args ...interface{}) { _, file1, pos1, _ := runtime.Caller(1) @@ -37,5 +37,5 @@ func GetLogger(prefix string, debug bool) func(string, ...interface{}) { } } - return func(msg string, args ...interface{}) {} + return func(_ string, _ ...interface{}) {} } diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go b/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go index 26c2a05a3..c0f43e728 100644 --- a/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go +++ b/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go @@ -1,6 +1,7 @@ package replace import ( + "encoding/json" "fmt" "net/url" "os" @@ -40,6 +41,8 @@ func RewriteSchemaToRef(sp *spec.Swagger, key string, ref spec.Ref) error { if refable.Schema != nil { refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} } + case map[string]interface{}: // this happens e.g. 
if a schema points to an extension unmarshaled as map[string]interface{} + return rewriteParentRef(sp, key, ref) default: return fmt.Errorf("no schema with ref found at %s for %T", key, value) } @@ -120,6 +123,9 @@ func rewriteParentRef(sp *spec.Swagger, key string, ref spec.Ref) error { case spec.SchemaProperties: container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + case *interface{}: + *container = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + // NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema default: @@ -318,8 +324,8 @@ type DeepestRefResult struct { } // DeepestRef finds the first definition ref, from a cascade of nested refs which are not definitions. -// - if no definition is found, returns the deepest ref. -// - pointers to external files are expanded +// - if no definition is found, returns the deepest ref. +// - pointers to external files are expanded // // NOTE: all external $ref's are assumed to be already expanded at this stage. func DeepestRef(sp *spec.Swagger, opts *spec.ExpandOptions, ref spec.Ref) (*DeepestRefResult, error) { @@ -385,8 +391,9 @@ func DeepestRef(sp *spec.Swagger, opts *spec.ExpandOptions, ref spec.Ref) (*Deep err := asSchema.UnmarshalJSON(asJSON) if err != nil { return nil, - fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T", - currentRef.String(), value) + fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T (%v)", + currentRef.String(), value, err, + ) } warnings = append(warnings, fmt.Sprintf("found $ref %q (response) interpreted as schema", currentRef.String())) @@ -402,8 +409,9 @@ func DeepestRef(sp *spec.Swagger, opts *spec.ExpandOptions, ref spec.Ref) (*Deep var asSchema spec.Schema if err := asSchema.UnmarshalJSON(asJSON); err != nil { return nil, - fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T", - currentRef.String(), value) + fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T (%v)", + currentRef.String(), value, err, + ) } warnings = append(warnings, fmt.Sprintf("found $ref %q (parameter) interpreted as schema", currentRef.String())) @@ -414,9 +422,25 @@ func DeepestRef(sp *spec.Swagger, opts *spec.ExpandOptions, ref spec.Ref) (*Deep currentRef = asSchema.Ref default: - return nil, - fmt.Errorf("unhandled type to resolve JSON pointer %s. Expected a Schema, got: %T", - currentRef.String(), value) + // fallback: attempts to resolve the pointer as a schema + if refable == nil { + break DOWNREF + } + + asJSON, _ := json.Marshal(refable) + var asSchema spec.Schema + if err := asSchema.UnmarshalJSON(asJSON); err != nil { + return nil, + fmt.Errorf("unhandled type to resolve JSON pointer %s. 
Expected a Schema, got: %T (%v)", + currentRef.String(), value, err, + ) + } + warnings = append(warnings, fmt.Sprintf("found $ref %q (%T) interpreted as schema", currentRef.String(), refable)) + + if asSchema.Ref.String() == "" { + break DOWNREF + } + currentRef = asSchema.Ref } } diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go index 18e552ead..ac80fc2e8 100644 --- a/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go +++ b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go @@ -69,7 +69,7 @@ func KeyParts(key string) SplitKey { return res } -// SplitKey holds of the parts of a /-separated key, soi that their location may be determined. +// SplitKey holds of the parts of a /-separated key, so that their location may be determined. type SplitKey []string // IsDefinition is true when the split key is in the #/definitions section of a spec diff --git a/vendor/github.com/go-openapi/analysis/mixin.go b/vendor/github.com/go-openapi/analysis/mixin.go index b25305264..7785a29b2 100644 --- a/vendor/github.com/go-openapi/analysis/mixin.go +++ b/vendor/github.com/go-openapi/analysis/mixin.go @@ -53,7 +53,7 @@ // collisions. func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string { skipped := make([]string, 0, len(mixins)) - opIds := getOpIds(primary) + opIDs := getOpIDs(primary) initPrimary(primary) for i, m := range mixins { @@ -74,7 +74,7 @@ func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string { skipped = append(skipped, mergeDefinitions(primary, m)...) // merging paths requires a map of operationIDs to work with - skipped = append(skipped, mergePaths(primary, m, opIds, i)...) + skipped = append(skipped, mergePaths(primary, m, opIDs, i)...) skipped = append(skipped, mergeParameters(primary, m)...) @@ -84,9 +84,9 @@ func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string { return skipped } -// getOpIds extracts all the paths..operationIds from the given +// getOpIDs extracts all the paths..operationIds from the given // spec and returns them as the keys in a map with 'true' values. -func getOpIds(s *spec.Swagger) map[string]bool { +func getOpIDs(s *spec.Swagger) map[string]bool { rv := make(map[string]bool) if s.Paths == nil { return rv @@ -179,7 +179,7 @@ func mergeDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) return } -func mergePaths(primary *spec.Swagger, m *spec.Swagger, opIds map[string]bool, mixIndex int) (skipped []string) { +func mergePaths(primary *spec.Swagger, m *spec.Swagger, opIDs map[string]bool, mixIndex int) (skipped []string) { if m.Paths != nil { for k, v := range m.Paths.Paths { if _, exists := primary.Paths.Paths[k]; exists { @@ -198,10 +198,10 @@ func mergePaths(primary *spec.Swagger, m *spec.Swagger, opIds map[string]bool, m // all the proivded specs are already unique. 
piops := pathItemOps(v) for _, piop := range piops { - if opIds[piop.ID] { + if opIDs[piop.ID] { piop.ID = fmt.Sprintf("%v%v%v", piop.ID, "Mixin", mixIndex) } - opIds[piop.ID] = true + opIDs[piop.ID] = true } primary.Paths.Paths[k] = v } @@ -367,7 +367,7 @@ func mergeSwaggerProps(primary *spec.Swagger, m *spec.Swagger) []string { return skipped } -// nolint: unparam +//nolint:unparam func mergeExternalDocs(primary *spec.ExternalDocumentation, m *spec.ExternalDocumentation) []string { if primary.Description == "" { primary.Description = m.Description diff --git a/vendor/github.com/go-openapi/analysis/schema.go b/vendor/github.com/go-openapi/analysis/schema.go index fc055095c..ab190db5b 100644 --- a/vendor/github.com/go-openapi/analysis/schema.go +++ b/vendor/github.com/go-openapi/analysis/schema.go @@ -1,7 +1,7 @@ package analysis import ( - "fmt" + "errors" "github.com/go-openapi/spec" "github.com/go-openapi/strfmt" @@ -19,7 +19,7 @@ type SchemaOpts struct { // patterns. func Schema(opts SchemaOpts) (*AnalyzedSchema, error) { if opts.Schema == nil { - return nil, fmt.Errorf("no schema to analyze") + return nil, errors.New("no schema to analyze") } a := &AnalyzedSchema{ @@ -247,10 +247,10 @@ func (a *AnalyzedSchema) isArrayType() bool { // isAnalyzedAsComplex determines if an analyzed schema is eligible to flattening (i.e. it is "complex"). // // Complex means the schema is any of: -// - a simple type (primitive) -// - an array of something (items are possibly complex ; if this is the case, items will generate a definition) -// - a map of something (additionalProperties are possibly complex ; if this is the case, additionalProperties will -// generate a definition) +// - a simple type (primitive) +// - an array of something (items are possibly complex ; if this is the case, items will generate a definition) +// - a map of something (additionalProperties are possibly complex ; if this is the case, additionalProperties will +// generate a definition) func (a *AnalyzedSchema) isAnalyzedAsComplex() bool { return !a.IsSimpleSchema && !a.IsArray && !a.IsMap } diff --git a/vendor/github.com/go-openapi/errors/.golangci.yml b/vendor/github.com/go-openapi/errors/.golangci.yml index 4e1fc0c7d..cf88ead32 100644 --- a/vendor/github.com/go-openapi/errors/.golangci.yml +++ b/vendor/github.com/go-openapi/errors/.golangci.yml @@ -4,45 +4,59 @@ linters-settings: golint: min-confidence: 0 gocyclo: - min-complexity: 30 + min-complexity: 45 maligned: suggest-new: true dupl: - threshold: 100 + threshold: 200 goconst: min-len: 2 - min-occurrences: 4 + min-occurrences: 3 + linters: enable-all: true disable: + - errname # this repo doesn't follow the convention advised by this linter - maligned + - unparam - lll + - gochecknoinits - gochecknoglobals + - funlen - godox - gocognit - whitespace - wsl - - funlen - - gochecknoglobals - - gochecknoinits - - scopelint - wrapcheck - - exhaustivestruct - - exhaustive - - nlreturn - testpackage - - gci - - gofumpt - - goerr113 + - nlreturn - gomnd - - tparallel + - exhaustivestruct + - goerr113 + - errorlint - nestif - godot - - errorlint + - gofumpt - paralleltest - tparallel - - cyclop - - errname - - varnamelen + - thelper + - ifshort - exhaustruct - - maintidx + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git 
a/vendor/github.com/go-openapi/errors/README.md b/vendor/github.com/go-openapi/errors/README.md index 4aac049e6..6d57ea55c 100644 --- a/vendor/github.com/go-openapi/errors/README.md +++ b/vendor/github.com/go-openapi/errors/README.md @@ -1,11 +1,8 @@ -# OpenAPI errors +# OpenAPI errors [![Build Status](https://github.com/go-openapi/errors/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/errors/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/errors/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/errors) -[![Build Status](https://travis-ci.org/go-openapi/errors.svg?branch=master)](https://travis-ci.org/go-openapi/errors) -[![codecov](https://codecov.io/gh/go-openapi/errors/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/errors) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) [![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/errors/master/LICENSE) [![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/errors.svg)](https://pkg.go.dev/github.com/go-openapi/errors) -[![GolangCI](https://golangci.com/badges/github.com/go-openapi/errors.svg)](https://golangci.com) [![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/errors)](https://goreportcard.com/report/github.com/go-openapi/errors) Shared errors and error interface used throughout the various libraries found in the go-openapi toolkit. diff --git a/vendor/github.com/go-openapi/errors/api.go b/vendor/github.com/go-openapi/errors/api.go index c13f3435f..5320cb963 100644 --- a/vendor/github.com/go-openapi/errors/api.go +++ b/vendor/github.com/go-openapi/errors/api.go @@ -55,9 +55,15 @@ func (a apiError) MarshalJSON() ([]byte, error) { // New creates a new API error with a code and a message func New(code int32, message string, args ...interface{}) Error { if len(args) > 0 { - return &apiError{code, fmt.Sprintf(message, args...)} + return &apiError{ + code: code, + message: fmt.Sprintf(message, args...), + } + } + return &apiError{ + code: code, + message: message, } - return &apiError{code, message} } // NotFound creates a new not found error @@ -130,10 +136,14 @@ func flattenComposite(errs *CompositeError) *CompositeError { // MethodNotAllowed creates a new method not allowed error func MethodNotAllowed(requested string, allow []string) Error { msg := fmt.Sprintf("method %s is not allowed, but [%s] are", requested, strings.Join(allow, ",")) - return &MethodNotAllowedError{code: http.StatusMethodNotAllowed, Allowed: allow, message: msg} + return &MethodNotAllowedError{ + code: http.StatusMethodNotAllowed, + Allowed: allow, + message: msg, + } } -// ServeError the error handler interface implementation +// ServeError implements the http error handler interface func ServeError(rw http.ResponseWriter, r *http.Request, err error) { rw.Header().Set("Content-Type", "application/json") switch e := err.(type) { diff --git a/vendor/github.com/go-openapi/errors/schema.go b/vendor/github.com/go-openapi/errors/schema.go index da5f6c78c..cf7ac2ed4 100644 --- a/vendor/github.com/go-openapi/errors/schema.go +++ b/vendor/github.com/go-openapi/errors/schema.go @@ -120,6 +120,10 @@ func (c *CompositeError) Error() string { return c.message } +func (c *CompositeError) Unwrap() []error { + return c.Errors +} + // MarshalJSON implements the JSON encoding interface func (c CompositeError) MarshalJSON() ([]byte, error) { return 
json.Marshal(map[string]interface{}{ @@ -133,7 +137,7 @@ func (c CompositeError) MarshalJSON() ([]byte, error) { func CompositeValidationError(errors ...error) *CompositeError { return &CompositeError{ code: CompositeErrorCode, - Errors: append([]error{}, errors...), + Errors: append(make([]error, 0, len(errors)), errors...), message: "validation failure list", } } diff --git a/vendor/github.com/go-openapi/inflect/.gitignore b/vendor/github.com/go-openapi/inflect/.gitignore new file mode 100644 index 000000000..87c3bd3e6 --- /dev/null +++ b/vendor/github.com/go-openapi/inflect/.gitignore @@ -0,0 +1,5 @@ +secrets.yml +coverage.out +coverage.txt +*.cov +.idea diff --git a/vendor/github.com/go-openapi/inflect/.golangci.yml b/vendor/github.com/go-openapi/inflect/.golangci.yml new file mode 100644 index 000000000..22f8d21cc --- /dev/null +++ b/vendor/github.com/go-openapi/inflect/.golangci.yml @@ -0,0 +1,61 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/inflect/.hgignore b/vendor/github.com/go-openapi/inflect/.hgignore deleted file mode 100644 index 6cc3d7ce1..000000000 --- a/vendor/github.com/go-openapi/inflect/.hgignore +++ /dev/null @@ -1 +0,0 @@ -swp$ diff --git a/vendor/github.com/go-openapi/inflect/LICENCE b/vendor/github.com/go-openapi/inflect/LICENSE similarity index 100% rename from vendor/github.com/go-openapi/inflect/LICENCE rename to vendor/github.com/go-openapi/inflect/LICENSE diff --git a/vendor/github.com/go-openapi/inflect/README b/vendor/github.com/go-openapi/inflect/README deleted file mode 100644 index 014699a22..000000000 --- a/vendor/github.com/go-openapi/inflect/README +++ /dev/null @@ -1,168 +0,0 @@ -INSTALLATION - -go get bitbucket.org/pkg/inflect - -PACKAGE - -package inflect - - -FUNCTIONS - -func AddAcronym(word string) - -func AddHuman(suffix, replacement string) - -func AddIrregular(singular, plural string) - -func AddPlural(suffix, replacement string) - -func AddSingular(suffix, replacement string) - -func AddUncountable(word string) - -func Asciify(word string) string - -func Camelize(word string) string - -func CamelizeDownFirst(word string) string - -func Capitalize(word string) string - -func Dasherize(word string) string - -func ForeignKey(word string) string - -func ForeignKeyCondensed(word string) string - -func Humanize(word string) string - -func Ordinalize(word string) string - -func Parameterize(word string) string - -func ParameterizeJoin(word, sep string) string - -func Pluralize(word string) string - -func Singularize(word string) string - -func Tableize(word string) string - -func Titleize(word string) string - -func Typeify(word string) string - -func Uncountables() map[string]bool - -func Underscore(word string) 
string - - -TYPES - -type Rule struct { - // contains filtered or unexported fields -} -used by rulesets - -type Ruleset struct { - // contains filtered or unexported fields -} -a Ruleset is the config of pluralization rules -you can extend the rules with the Add* methods - -func NewDefaultRuleset() *Ruleset -create a new ruleset and load it with the default -set of common English pluralization rules - -func NewRuleset() *Ruleset -create a blank ruleset. Unless you are going to -build your own rules from scratch you probably -won't need this and can just use the defaultRuleset -via the global inflect.* methods - -func (rs *Ruleset) AddAcronym(word string) -if you use acronym you may need to add them to the ruleset -to prevent Underscored words of things like "HTML" coming out -as "h_t_m_l" - -func (rs *Ruleset) AddHuman(suffix, replacement string) -Human rules are applied by humanize to show more friendly -versions of words - -func (rs *Ruleset) AddIrregular(singular, plural string) -Add any inconsistant pluralizing/sinularizing rules -to the set here. - -func (rs *Ruleset) AddPlural(suffix, replacement string) -add a pluralization rule - -func (rs *Ruleset) AddPluralExact(suffix, replacement string, exact bool) -add a pluralization rule with full string match - -func (rs *Ruleset) AddSingular(suffix, replacement string) -add a singular rule - -func (rs *Ruleset) AddSingularExact(suffix, replacement string, exact bool) -same as AddSingular but you can set `exact` to force -a full string match - -func (rs *Ruleset) AddUncountable(word string) -add a word to this ruleset that has the same singular and plural form -for example: "rice" - -func (rs *Ruleset) Asciify(word string) string -transforms latin characters like รฉ -> e - -func (rs *Ruleset) Camelize(word string) string -"dino_party" -> "DinoParty" - -func (rs *Ruleset) CamelizeDownFirst(word string) string -same as Camelcase but with first letter downcased - -func (rs *Ruleset) Capitalize(word string) string -uppercase first character - -func (rs *Ruleset) Dasherize(word string) string -"SomeText" -> "some-text" - -func (rs *Ruleset) ForeignKey(word string) string -an underscored foreign key name "Person" -> "person_id" - -func (rs *Ruleset) ForeignKeyCondensed(word string) string -a foreign key (with an underscore) "Person" -> "personid" - -func (rs *Ruleset) Humanize(word string) string -First letter of sentance captitilized -Uses custom friendly replacements via AddHuman() - -func (rs *Ruleset) Ordinalize(str string) string -"1031" -> "1031st" - -func (rs *Ruleset) Parameterize(word string) string -param safe dasherized names like "my-param" - -func (rs *Ruleset) ParameterizeJoin(word, sep string) string -param safe dasherized names with custom seperator - -func (rs *Ruleset) Pluralize(word string) string -returns the plural form of a singular word - -func (rs *Ruleset) Singularize(word string) string -returns the singular form of a plural word - -func (rs *Ruleset) Tableize(word string) string -Rails style pluralized table names: "SuperPerson" -> "super_people" - -func (rs *Ruleset) Titleize(word string) string -Captitilize every word in sentance "hello there" -> "Hello There" - -func (rs *Ruleset) Typeify(word string) string -"something_like_this" -> "SomethingLikeThis" - -func (rs *Ruleset) Uncountables() map[string]bool - -func (rs *Ruleset) Underscore(word string) string -lowercase underscore version "BigBen" -> "big_ben" - - diff --git a/vendor/github.com/go-openapi/inflect/README.md 
b/vendor/github.com/go-openapi/inflect/README.md new file mode 100644 index 000000000..187b23b93 --- /dev/null +++ b/vendor/github.com/go-openapi/inflect/README.md @@ -0,0 +1,18 @@ +# inflect [![Build Status](https://github.com/go-openapi/inflect/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/inflect/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/inflect/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/inflect) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/inflect/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/inflect.svg)](https://pkg.go.dev/github.com/go-openapi/inflect) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/inflect)](https://goreportcard.com/report/github.com/go-openapi/inflect) + +A package to pluralize words. + +Originally forked from fork of https://bitbucket.org/pkg/inflect under a MIT License. + +A golang library applying grammar rules to English words. + +> This package provides a basic set of functions applying +> grammar rules to inflect English words, modify case style +> (Capitalize, camelCase, snake_case, etc.). +> +> Acronyms are properly handled. A common use case is word pluralization. diff --git a/vendor/github.com/go-openapi/inflect/inflect.go b/vendor/github.com/go-openapi/inflect/inflect.go index 3008844ca..9d8ca6dbd 100644 --- a/vendor/github.com/go-openapi/inflect/inflect.go +++ b/vendor/github.com/go-openapi/inflect/inflect.go @@ -19,12 +19,11 @@ type Rule struct { // a Ruleset is the config of pluralization rules // you can extend the rules with the Add* methods type Ruleset struct { - uncountables map[string]bool - plurals []*Rule - singulars []*Rule - humans []*Rule - acronyms []*Rule - acronymMatcher *regexp.Regexp + uncountables map[string]bool + plurals []*Rule + singulars []*Rule + humans []*Rule + acronyms []*Rule } // create a blank ruleset. Unless you are going to @@ -282,7 +281,7 @@ func (rs *Ruleset) AddHuman(suffix, replacement string) { rs.humans = append([]*Rule{r}, rs.humans...) } -// Add any inconsistant pluralizing/sinularizing rules +// Add any inconsistent pluralizing/sinularizing rules // to the set here. 
func (rs *Ruleset) AddIrregular(singular, plural string) { delete(rs.uncountables, singular) @@ -387,7 +386,7 @@ func (rs *Ruleset) Titleize(word string) string { func (rs *Ruleset) safeCaseAcronyms(word string) string { // convert an acroymn like HTML into Html for _, rule := range rs.acronyms { - word = strings.Replace(word, rule.suffix, rule.replacement, -1) + word = strings.ReplaceAll(word, rule.suffix, rule.replacement) } return word } @@ -409,7 +408,7 @@ func (rs *Ruleset) Humanize(word string) string { word = replaceLast(word, "_id", "") // strip foreign key kinds // replace and strings in humans list for _, rule := range rs.humans { - word = strings.Replace(word, rule.suffix, rule.replacement, -1) + word = strings.ReplaceAll(word, rule.suffix, rule.replacement) } sentance := rs.seperatedWords(word, " ") return strings.ToUpper(sentance[:1]) + sentance[1:] @@ -430,19 +429,19 @@ func (rs *Ruleset) Tableize(word string) string { return rs.Pluralize(rs.Underscore(rs.Typeify(word))) } -var notUrlSafe *regexp.Regexp = regexp.MustCompile(`[^\w\d\-_ ]`) +var notURLSafe = regexp.MustCompile(`[^\w\d\-_ ]`) // param safe dasherized names like "my-param" func (rs *Ruleset) Parameterize(word string) string { return ParameterizeJoin(word, "-") } -// param safe dasherized names with custom seperator +// param safe dasherized names with custom separator func (rs *Ruleset) ParameterizeJoin(word, sep string) string { word = strings.ToLower(word) word = rs.Asciify(word) - word = notUrlSafe.ReplaceAllString(word, "") - word = strings.Replace(word, " ", sep, -1) + word = notURLSafe.ReplaceAllString(word, "") + word = strings.ReplaceAll(word, " ", sep) if len(sep) > 0 { squash, err := regexp.Compile(sep + "+") if err == nil { @@ -453,7 +452,7 @@ func (rs *Ruleset) ParameterizeJoin(word, sep string) string { return word } -var lookalikes map[string]*regexp.Regexp = map[string]*regexp.Regexp{ +var lookalikes = map[string]*regexp.Regexp{ "A": regexp.MustCompile(`ร€|ร|ร‚|รƒ|ร„|ร…`), "AE": regexp.MustCompile(`ร†`), "C": regexp.MustCompile(`ร‡`), @@ -487,7 +486,7 @@ func (rs *Ruleset) Asciify(word string) string { return word } -var tablePrefix *regexp.Regexp = regexp.MustCompile(`^[^.]*\.`) +var tablePrefix = regexp.MustCompile(`^[^.]*\.`) // "something_like_this" -> "SomethingLikeThis" func (rs *Ruleset) Typeify(word string) string { @@ -642,13 +641,13 @@ func reverse(s string) string { func isSpacerChar(c rune) bool { switch { - case c == rune("_"[0]): + case c == '_': return true - case c == rune(" "[0]): + case c == ':': return true - case c == rune(":"[0]): + case c == '-': return true - case c == rune("-"[0]): + case unicode.IsSpace(c): return true } return false diff --git a/vendor/github.com/go-openapi/jsonpointer/.golangci.yml b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml new file mode 100644 index 000000000..22f8d21cc --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml @@ -0,0 +1,61 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - 
exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/jsonpointer/README.md b/vendor/github.com/go-openapi/jsonpointer/README.md index 813788aff..0108f1d57 100644 --- a/vendor/github.com/go-openapi/jsonpointer/README.md +++ b/vendor/github.com/go-openapi/jsonpointer/README.md @@ -1,6 +1,10 @@ -# gojsonpointer [![Build Status](https://travis-ci.org/go-openapi/jsonpointer.svg?branch=master)](https://travis-ci.org/go-openapi/jsonpointer) [![codecov](https://codecov.io/gh/go-openapi/jsonpointer/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonpointer) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +# gojsonpointer [![Build Status](https://github.com/go-openapi/jsonpointer/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/jsonpointer/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/jsonpointer/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonpointer) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonpointer/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/jsonpointer.svg)](https://pkg.go.dev/github.com/go-openapi/jsonpointer) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/jsonpointer)](https://goreportcard.com/report/github.com/go-openapi/jsonpointer) -[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonpointer/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/jsonpointer?status.svg)](http://godoc.org/github.com/go-openapi/jsonpointer) An implementation of JSON Pointer - Go language ## Status diff --git a/vendor/github.com/go-openapi/jsonpointer/pointer.go b/vendor/github.com/go-openapi/jsonpointer/pointer.go index 7df9853de..d970c7cf4 100644 --- a/vendor/github.com/go-openapi/jsonpointer/pointer.go +++ b/vendor/github.com/go-openapi/jsonpointer/pointer.go @@ -26,6 +26,7 @@ package jsonpointer import ( + "encoding/json" "errors" "fmt" "reflect" @@ -40,6 +41,7 @@ pointerSeparator = `/` invalidStart = `JSON pointer must be empty or start with a "` + pointerSeparator + notFound = `Can't find the pointer in the document` ) var jsonPointableType = reflect.TypeOf(new(JSONPointable)).Elem() @@ -48,13 +50,13 @@ // JSONPointable is an interface for structs to implement when they need to customize the // json pointer process type JSONPointable interface { - JSONLookup(string) (interface{}, error) + JSONLookup(string) (any, error) } // JSONSetable is an interface for structs to implement when they need to customize the // json pointer process type JSONSetable interface { - JSONSet(string, interface{}) error + JSONSet(string, any) error } // New creates a new json pointer for the given string @@ -81,9 +83,7 @@ func (p *Pointer) parse(jsonPointerString string) error { err = errors.New(invalidStart) } else { referenceTokens := strings.Split(jsonPointerString, pointerSeparator) - for _, referenceToken := range referenceTokens[1:] { - p.referenceTokens = append(p.referenceTokens, referenceToken) - } + p.referenceTokens 
= append(p.referenceTokens, referenceTokens[1:]...) } } @@ -91,38 +91,58 @@ func (p *Pointer) parse(jsonPointerString string) error { } // Get uses the pointer to retrieve a value from a JSON document -func (p *Pointer) Get(document interface{}) (interface{}, reflect.Kind, error) { +func (p *Pointer) Get(document any) (any, reflect.Kind, error) { return p.get(document, swag.DefaultJSONNameProvider) } // Set uses the pointer to set a value from a JSON document -func (p *Pointer) Set(document interface{}, value interface{}) (interface{}, error) { +func (p *Pointer) Set(document any, value any) (any, error) { return document, p.set(document, value, swag.DefaultJSONNameProvider) } // GetForToken gets a value for a json pointer token 1 level deep -func GetForToken(document interface{}, decodedToken string) (interface{}, reflect.Kind, error) { +func GetForToken(document any, decodedToken string) (any, reflect.Kind, error) { return getSingleImpl(document, decodedToken, swag.DefaultJSONNameProvider) } // SetForToken gets a value for a json pointer token 1 level deep -func SetForToken(document interface{}, decodedToken string, value interface{}) (interface{}, error) { +func SetForToken(document any, decodedToken string, value any) (any, error) { return document, setSingleImpl(document, value, decodedToken, swag.DefaultJSONNameProvider) } -func getSingleImpl(node interface{}, decodedToken string, nameProvider *swag.NameProvider) (interface{}, reflect.Kind, error) { +func isNil(input any) bool { + if input == nil { + return true + } + + kind := reflect.TypeOf(input).Kind() + switch kind { //nolint:exhaustive + case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan: + return reflect.ValueOf(input).IsNil() + default: + return false + } +} + +func getSingleImpl(node any, decodedToken string, nameProvider *swag.NameProvider) (any, reflect.Kind, error) { rValue := reflect.Indirect(reflect.ValueOf(node)) kind := rValue.Kind() + if isNil(node) { + return nil, kind, fmt.Errorf("nil value has not field %q", decodedToken) + } - if rValue.Type().Implements(jsonPointableType) { - r, err := node.(JSONPointable).JSONLookup(decodedToken) + switch typed := node.(type) { + case JSONPointable: + r, err := typed.JSONLookup(decodedToken) if err != nil { return nil, kind, err } return r, kind, nil + case *any: // case of a pointer to interface, that is not resolved by reflect.Indirect + return getSingleImpl(*typed, decodedToken, nameProvider) } - switch kind { + switch kind { //nolint:exhaustive case reflect.Struct: nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) if !ok { @@ -159,7 +179,7 @@ func getSingleImpl(node interface{}, decodedToken string, nameProvider *swag.Nam } -func setSingleImpl(node, data interface{}, decodedToken string, nameProvider *swag.NameProvider) error { +func setSingleImpl(node, data any, decodedToken string, nameProvider *swag.NameProvider) error { rValue := reflect.Indirect(reflect.ValueOf(node)) if ns, ok := node.(JSONSetable); ok { // pointer impl @@ -170,7 +190,7 @@ func setSingleImpl(node, data interface{}, decodedToken string, nameProvider *sw return node.(JSONSetable).JSONSet(decodedToken, data) } - switch rValue.Kind() { + switch rValue.Kind() { //nolint:exhaustive case reflect.Struct: nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) if !ok { @@ -210,7 +230,7 @@ func setSingleImpl(node, data interface{}, decodedToken string, nameProvider *sw } -func (p *Pointer) get(node interface{}, nameProvider *swag.NameProvider) (interface{}, reflect.Kind, 
error) { +func (p *Pointer) get(node any, nameProvider *swag.NameProvider) (any, reflect.Kind, error) { if nameProvider == nil { nameProvider = swag.DefaultJSONNameProvider @@ -231,8 +251,7 @@ func (p *Pointer) get(node interface{}, nameProvider *swag.NameProvider) (interf if err != nil { return nil, knd, err } - node, kind = r, knd - + node = r } rValue := reflect.ValueOf(node) @@ -241,11 +260,11 @@ func (p *Pointer) get(node interface{}, nameProvider *swag.NameProvider) (interf return node, kind, nil } -func (p *Pointer) set(node, data interface{}, nameProvider *swag.NameProvider) error { +func (p *Pointer) set(node, data any, nameProvider *swag.NameProvider) error { knd := reflect.ValueOf(node).Kind() if knd != reflect.Ptr && knd != reflect.Struct && knd != reflect.Map && knd != reflect.Slice && knd != reflect.Array { - return fmt.Errorf("only structs, pointers, maps and slices are supported for setting values") + return errors.New("only structs, pointers, maps and slices are supported for setting values") } if nameProvider == nil { @@ -284,7 +303,7 @@ func (p *Pointer) set(node, data interface{}, nameProvider *swag.NameProvider) e continue } - switch kind { + switch kind { //nolint:exhaustive case reflect.Struct: nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) if !ok { @@ -363,6 +382,128 @@ func (p *Pointer) String() string { return pointerString } +func (p *Pointer) Offset(document string) (int64, error) { + dec := json.NewDecoder(strings.NewReader(document)) + var offset int64 + for _, ttk := range p.DecodedTokens() { + tk, err := dec.Token() + if err != nil { + return 0, err + } + switch tk := tk.(type) { + case json.Delim: + switch tk { + case '{': + offset, err = offsetSingleObject(dec, ttk) + if err != nil { + return 0, err + } + case '[': + offset, err = offsetSingleArray(dec, ttk) + if err != nil { + return 0, err + } + default: + return 0, fmt.Errorf("invalid token %#v", tk) + } + default: + return 0, fmt.Errorf("invalid token %#v", tk) + } + } + return offset, nil +} + +func offsetSingleObject(dec *json.Decoder, decodedToken string) (int64, error) { + for dec.More() { + offset := dec.InputOffset() + tk, err := dec.Token() + if err != nil { + return 0, err + } + switch tk := tk.(type) { + case json.Delim: + switch tk { + case '{': + if err = drainSingle(dec); err != nil { + return 0, err + } + case '[': + if err = drainSingle(dec); err != nil { + return 0, err + } + } + case string: + if tk == decodedToken { + return offset, nil + } + default: + return 0, fmt.Errorf("invalid token %#v", tk) + } + } + return 0, fmt.Errorf("token reference %q not found", decodedToken) +} + +func offsetSingleArray(dec *json.Decoder, decodedToken string) (int64, error) { + idx, err := strconv.Atoi(decodedToken) + if err != nil { + return 0, fmt.Errorf("token reference %q is not a number: %v", decodedToken, err) + } + var i int + for i = 0; i < idx && dec.More(); i++ { + tk, err := dec.Token() + if err != nil { + return 0, err + } + + if delim, isDelim := tk.(json.Delim); isDelim { + switch delim { + case '{': + if err = drainSingle(dec); err != nil { + return 0, err + } + case '[': + if err = drainSingle(dec); err != nil { + return 0, err + } + } + } + } + + if !dec.More() { + return 0, fmt.Errorf("token reference %q not found", decodedToken) + } + return dec.InputOffset(), nil +} + +// drainSingle drains a single level of object or array. +// The decoder has to guarantee the beginning delim (i.e. '{' or '[') has been consumed. 
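Editor's note: a minimal sketch (not part of the patch) of how the new Pointer.Offset helper introduced above might be used to locate a pointer target inside a raw JSON document; the sample document and pointer string are illustrative only.

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/jsonpointer"
)

func main() {
	// Illustrative document; "~1" escapes the "/" inside the "/pets" key.
	const doc = `{"paths": {"/pets": {"get": {"summary": "list pets"}}}}`

	ptr, err := jsonpointer.New("/paths/~1pets/get")
	if err != nil {
		log.Fatal(err)
	}

	// Offset walks the document token by token and reports a byte offset
	// for the referenced token, which can be handy for error reporting.
	off, err := ptr.Offset(doc)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("target starts near byte", off)
}
```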
+func drainSingle(dec *json.Decoder) error { + for dec.More() { + tk, err := dec.Token() + if err != nil { + return err + } + if delim, isDelim := tk.(json.Delim); isDelim { + switch delim { + case '{': + if err = drainSingle(dec); err != nil { + return err + } + case '[': + if err = drainSingle(dec); err != nil { + return err + } + } + } + } + + // Consumes the ending delim + if _, err := dec.Token(); err != nil { + return err + } + return nil +} + // Specific JSON pointer encoding here // ~0 => ~ // ~1 => / @@ -377,14 +518,14 @@ func (p *Pointer) String() string { // Unescape unescapes a json pointer reference token string to the original representation func Unescape(token string) string { - step1 := strings.Replace(token, encRefTok1, decRefTok1, -1) - step2 := strings.Replace(step1, encRefTok0, decRefTok0, -1) + step1 := strings.ReplaceAll(token, encRefTok1, decRefTok1) + step2 := strings.ReplaceAll(step1, encRefTok0, decRefTok0) return step2 } // Escape escapes a pointer reference token string func Escape(token string) string { - step1 := strings.Replace(token, decRefTok0, encRefTok0, -1) - step2 := strings.Replace(step1, decRefTok1, encRefTok1, -1) + step1 := strings.ReplaceAll(token, decRefTok0, encRefTok0) + step2 := strings.ReplaceAll(step1, decRefTok1, encRefTok1) return step2 } diff --git a/vendor/github.com/go-openapi/jsonreference/.golangci.yml b/vendor/github.com/go-openapi/jsonreference/.golangci.yml index 013fc1943..22f8d21cc 100644 --- a/vendor/github.com/go-openapi/jsonreference/.golangci.yml +++ b/vendor/github.com/go-openapi/jsonreference/.golangci.yml @@ -1,50 +1,61 @@ linters-settings: govet: check-shadowing: true + golint: + min-confidence: 0 gocyclo: - min-complexity: 30 + min-complexity: 45 maligned: suggest-new: true dupl: - threshold: 100 + threshold: 200 goconst: min-len: 2 - min-occurrences: 4 - paralleltest: - ignore-missing: true + min-occurrences: 3 + linters: enable-all: true disable: - maligned + - unparam - lll + - gochecknoinits - gochecknoglobals + - funlen - godox - gocognit - whitespace - wsl - - funlen - - gochecknoglobals - - gochecknoinits - - scopelint - wrapcheck - - exhaustivestruct - - exhaustive - - nlreturn - testpackage - - gci - - gofumpt - - goerr113 + - nlreturn - gomnd - - tparallel + - exhaustivestruct + - goerr113 + - errorlint - nestif - godot - - errorlint - - varcheck - - interfacer - - deadcode - - golint + - gofumpt + - paralleltest + - tparallel + - thelper - ifshort - - structcheck - - nosnakecase - - varnamelen - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/jsonreference/README.md b/vendor/github.com/go-openapi/jsonreference/README.md index b94753aa5..c7fc2049c 100644 --- a/vendor/github.com/go-openapi/jsonreference/README.md +++ b/vendor/github.com/go-openapi/jsonreference/README.md @@ -1,15 +1,19 @@ -# gojsonreference [![Build Status](https://travis-ci.org/go-openapi/jsonreference.svg?branch=master)](https://travis-ci.org/go-openapi/jsonreference) [![codecov](https://codecov.io/gh/go-openapi/jsonreference/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonreference) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +# gojsonreference [![Build 
Status](https://github.com/go-openapi/jsonreference/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/jsonreference/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/jsonreference/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonreference) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonreference/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/jsonreference.svg)](https://pkg.go.dev/github.com/go-openapi/jsonreference) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/jsonreference)](https://goreportcard.com/report/github.com/go-openapi/jsonreference) -[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonreference/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/jsonreference?status.svg)](http://godoc.org/github.com/go-openapi/jsonreference) An implementation of JSON Reference - Go language ## Status Feature complete. Stable API ## Dependencies -https://github.com/go-openapi/jsonpointer +* https://github.com/go-openapi/jsonpointer ## References -http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07 -http://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03 +* http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07 +* http://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03 diff --git a/vendor/github.com/go-openapi/loads/.golangci.yml b/vendor/github.com/go-openapi/loads/.golangci.yml index d48b4a515..22f8d21cc 100644 --- a/vendor/github.com/go-openapi/loads/.golangci.yml +++ b/vendor/github.com/go-openapi/loads/.golangci.yml @@ -4,41 +4,58 @@ linters-settings: golint: min-confidence: 0 gocyclo: - min-complexity: 30 + min-complexity: 45 maligned: suggest-new: true dupl: - threshold: 100 + threshold: 200 goconst: min-len: 2 - min-occurrences: 4 + min-occurrences: 3 linters: enable-all: true disable: - maligned + - unparam - lll - - gochecknoglobals - gochecknoinits + - gochecknoglobals + - funlen - godox - gocognit - whitespace - wsl - - funlen - - gochecknoglobals - - gochecknoinits - - scopelint - wrapcheck - - exhaustivestruct - - exhaustive - - nlreturn - testpackage - - gci - - gofumpt - - goerr113 + - nlreturn - gomnd - - tparallel + - exhaustivestruct + - goerr113 + - errorlint - nestif - godot - - errorlint + - gofumpt - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/loads/README.md b/vendor/github.com/go-openapi/loads/README.md index df1f62646..f8bd440df 100644 --- a/vendor/github.com/go-openapi/loads/README.md +++ b/vendor/github.com/go-openapi/loads/README.md @@ -1,4 +1,4 @@ -# Loads OAI specs [![Build Status](https://travis-ci.org/go-openapi/loads.svg?branch=master)](https://travis-ci.org/go-openapi/loads) [![codecov](https://codecov.io/gh/go-openapi/loads/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/loads) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) [![Actions/Go Test 
Status](https://github.com/go-openapi/loads/workflows/Go%20Test/badge.svg)](https://github.com/go-openapi/loads/actions?query=workflow%3A"Go+Test") +# Loads OAI specs [![Build Status](https://github.com/go-openapi/loads/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/loads/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/loads/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/loads) [![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/loads/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/loads?status.svg)](http://godoc.org/github.com/go-openapi/loads) [![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/loads)](https://goreportcard.com/report/github.com/go-openapi/loads) diff --git a/vendor/github.com/go-openapi/loads/doc.go b/vendor/github.com/go-openapi/loads/doc.go index 3046da4ce..5bcaef5db 100644 --- a/vendor/github.com/go-openapi/loads/doc.go +++ b/vendor/github.com/go-openapi/loads/doc.go @@ -12,10 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -/* -Package loads provides document loading methods for swagger (OAI) specifications. - -It is used by other go-openapi packages to load and run analysis on local or remote spec documents. - -*/ +// Package loads provides document loading methods for swagger (OAI) specifications. +// +// It is used by other go-openapi packages to load and run analysis on local or remote spec documents. package loads diff --git a/vendor/github.com/go-openapi/loads/loaders.go b/vendor/github.com/go-openapi/loads/loaders.go index 44bd32b5b..b2d1e034c 100644 --- a/vendor/github.com/go-openapi/loads/loaders.go +++ b/vendor/github.com/go-openapi/loads/loaders.go @@ -21,7 +21,7 @@ func init() { jsonLoader := &loader{ DocLoaderWithMatch: DocLoaderWithMatch{ - Match: func(pth string) bool { + Match: func(_ string) bool { return true }, Fn: JSONDoc, @@ -86,7 +86,7 @@ func (l *loader) Load(path string) (json.RawMessage, error) { return nil, erp } - var lastErr error = errors.New("no loader matched") // default error if no match was found + lastErr := errors.New("no loader matched") // default error if no match was found for ldr := l; ldr != nil; ldr = ldr.Next { if ldr.Match != nil && !ldr.Match(path) { continue @@ -118,9 +118,8 @@ func JSONDoc(path string) (json.RawMessage, error) { // This sets the configuration at the package level. 
// // NOTE: -// * this updates the default loader used by github.com/go-openapi/spec -// * since this sets package level globals, you shouln't call this concurrently -// +// - this updates the default loader used by github.com/go-openapi/spec +// - since this sets package level globals, you shouln't call this concurrently func AddLoader(predicate DocMatcher, load DocLoader) { loaders = loaders.WithHead(&loader{ DocLoaderWithMatch: DocLoaderWithMatch{ diff --git a/vendor/github.com/go-openapi/loads/spec.go b/vendor/github.com/go-openapi/loads/spec.go index 93c8d4b89..c9039cd5d 100644 --- a/vendor/github.com/go-openapi/loads/spec.go +++ b/vendor/github.com/go-openapi/loads/spec.go @@ -38,8 +38,8 @@ type Document struct { specFilePath string origSpec *spec.Swagger schema *spec.Schema - raw json.RawMessage pathLoader *loader + raw json.RawMessage } // JSONSpec loads a spec from a json document @@ -49,7 +49,14 @@ func JSONSpec(path string, options ...LoaderOption) (*Document, error) { return nil, err } // convert to json - return Analyzed(data, "", options...) + doc, err := Analyzed(data, "", options...) + if err != nil { + return nil, err + } + + doc.specFilePath = path + + return doc, nil } // Embedded returns a Document based on embedded specs. No analysis is required @@ -71,7 +78,6 @@ func Embedded(orig, flat json.RawMessage, options ...LoaderOption) (*Document, e // Spec loads a new spec document from a local or remote path func Spec(path string, options ...LoaderOption) (*Document, error) { - ldr := loaderFromOptions(options) b, err := ldr.Load(path) @@ -84,12 +90,10 @@ func Spec(path string, options ...LoaderOption) (*Document, error) { return nil, err } - if document != nil { - document.specFilePath = path - document.pathLoader = ldr - } + document.specFilePath = path + document.pathLoader = ldr - return document, err + return document, nil } // Analyzed creates a new analyzed spec document for a root json.RawMessage. 
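Editor's note: as a usage sketch (not part of the patch), loading and expanding a document with these helpers could look like the following; the spec path is a placeholder.

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/loads"
)

func main() {
	// Placeholder path: Spec accepts local or remote spec locations.
	doc, err := loads.Spec("./swagger.yml")
	if err != nil {
		log.Fatal(err)
	}

	// Expanded resolves $ref fields into a new document; when no options are
	// given, the document's own file path is used as the relative base.
	expanded, err := doc.Expanded()
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(expanded.Version(), expanded.BasePath())
}
```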
@@ -117,7 +121,7 @@ func Analyzed(data json.RawMessage, version string, options ...LoaderOption) (*D } d := &Document{ - Analyzer: analysis.New(swspec), + Analyzer: analysis.New(swspec), // NOTE: at this moment, analysis does not follow $refs to documents outside the root doc schema: spec.MustLoadSwagger20Schema(), spec: swspec, raw: raw, @@ -152,9 +156,8 @@ func trimData(in json.RawMessage) (json.RawMessage, error) { return d, nil } -// Expanded expands the ref fields in the spec document and returns a new spec document +// Expanded expands the $ref fields in the spec document and returns a new spec document func (d *Document) Expanded(options ...*spec.ExpandOptions) (*Document, error) { - swspec := new(spec.Swagger) if err := json.Unmarshal(d.raw, swspec); err != nil { return nil, err @@ -163,6 +166,9 @@ func (d *Document) Expanded(options ...*spec.ExpandOptions) (*Document, error) { var expandOptions *spec.ExpandOptions if len(options) > 0 { expandOptions = options[0] + if expandOptions.RelativeBase == "" { + expandOptions.RelativeBase = d.specFilePath + } } else { expandOptions = &spec.ExpandOptions{ RelativeBase: d.specFilePath, @@ -194,7 +200,7 @@ func (d *Document) Expanded(options ...*spec.ExpandOptions) (*Document, error) { return dd, nil } -// BasePath the base path for this spec +// BasePath the base path for the API specified by this spec func (d *Document) BasePath() string { return d.spec.BasePath } @@ -242,8 +248,11 @@ func (d *Document) ResetDefinitions() *Document { // Pristine creates a new pristine document instance based on the input data func (d *Document) Pristine() *Document { - dd, _ := Analyzed(d.Raw(), d.Version()) + raw, _ := json.Marshal(d.Spec()) + dd, _ := Analyzed(raw, d.Version()) dd.pathLoader = d.pathLoader + dd.specFilePath = d.specFilePath + return dd } diff --git a/vendor/github.com/go-openapi/runtime/.golangci.yml b/vendor/github.com/go-openapi/runtime/.golangci.yml index b1aa7928a..1c75557ba 100644 --- a/vendor/github.com/go-openapi/runtime/.golangci.yml +++ b/vendor/github.com/go-openapi/runtime/.golangci.yml @@ -1,44 +1,62 @@ linters-settings: govet: - # Using err repeatedly considered as shadowing. 
- check-shadowing: false + check-shadowing: true golint: min-confidence: 0 gocyclo: - min-complexity: 30 + min-complexity: 45 maligned: suggest-new: true dupl: - threshold: 100 + threshold: 200 goconst: min-len: 2 - min-occurrences: 4 + min-occurrences: 3 + linters: + enable-all: true disable: + - nilerr # nilerr crashes on this repo - maligned + - unparam - lll + - gochecknoinits - gochecknoglobals + - funlen - godox - gocognit - whitespace - wsl - - funlen - - gochecknoglobals - - gochecknoinits - - scopelint - wrapcheck - - exhaustivestruct - - exhaustive - - nlreturn - testpackage - - gci - - gofumpt - - goerr113 + - nlreturn - gomnd - - tparallel + - exhaustivestruct + - goerr113 + - errorlint - nestif - godot - - errorlint - - noctx + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode - interfacer - - nilerr + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/runtime/README.md b/vendor/github.com/go-openapi/runtime/README.md index 5b1ec6494..b07e0ad9d 100644 --- a/vendor/github.com/go-openapi/runtime/README.md +++ b/vendor/github.com/go-openapi/runtime/README.md @@ -1,7 +1,10 @@ -# runtime [![Build Status](https://travis-ci.org/go-openapi/runtime.svg?branch=client-context)](https://travis-ci.org/go-openapi/runtime) [![codecov](https://codecov.io/gh/go-openapi/runtime/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/runtime) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +# runtime [![Build Status](https://github.com/go-openapi/runtime/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/runtime/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/runtime/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/runtime) -[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/runtime/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/runtime?status.svg)](http://godoc.org/github.com/go-openapi/runtime) +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/runtime/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/runtime.svg)](https://pkg.go.dev/github.com/go-openapi/runtime) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/runtime)](https://goreportcard.com/report/github.com/go-openapi/runtime) -# golang Open-API toolkit - runtime +# go OpenAPI toolkit runtime -The runtime component for use in codegeneration or as untyped usage. +The runtime component for use in code generation or as untyped usage. 
diff --git a/vendor/github.com/go-openapi/runtime/bytestream.go b/vendor/github.com/go-openapi/runtime/bytestream.go index 6eb6ceb5c..f8fb48223 100644 --- a/vendor/github.com/go-openapi/runtime/bytestream.go +++ b/vendor/github.com/go-openapi/runtime/bytestream.go @@ -38,9 +38,16 @@ type byteStreamOpts struct { Close bool } -// ByteStreamConsumer creates a consumer for byte streams, -// takes a Writer/BinaryUnmarshaler interface or binary slice by reference, -// and reads from the provided reader +// ByteStreamConsumer creates a consumer for byte streams. +// +// The consumer consumes from a provided reader into the data passed by reference. +// +// Supported output underlying types and interfaces, prioritized in this order: +// - io.ReaderFrom (for maximum control) +// - io.Writer (performs io.Copy) +// - encoding.BinaryUnmarshaler +// - *string +// - *[]byte func ByteStreamConsumer(opts ...byteStreamOpt) Consumer { var vals byteStreamOpts for _, opt := range opts { @@ -51,44 +58,70 @@ func ByteStreamConsumer(opts ...byteStreamOpt) Consumer { if reader == nil { return errors.New("ByteStreamConsumer requires a reader") // early exit } + if data == nil { + return errors.New("nil destination for ByteStreamConsumer") + } - close := defaultCloser + closer := defaultCloser if vals.Close { - if cl, ok := reader.(io.Closer); ok { - close = cl.Close + if cl, isReaderCloser := reader.(io.Closer); isReaderCloser { + closer = cl.Close } } - //nolint:errcheck // closing a reader wouldn't fail. - defer close() + defer func() { + _ = closer() + }() - if wrtr, ok := data.(io.Writer); ok { - _, err := io.Copy(wrtr, reader) + if readerFrom, isReaderFrom := data.(io.ReaderFrom); isReaderFrom { + _, err := readerFrom.ReadFrom(reader) return err } - buf := new(bytes.Buffer) + if writer, isDataWriter := data.(io.Writer); isDataWriter { + _, err := io.Copy(writer, reader) + return err + } + + // buffers input before writing to data + var buf bytes.Buffer _, err := buf.ReadFrom(reader) if err != nil { return err } b := buf.Bytes() - if bu, ok := data.(encoding.BinaryUnmarshaler); ok { - return bu.UnmarshalBinary(b) - } + switch destinationPointer := data.(type) { + case encoding.BinaryUnmarshaler: + return destinationPointer.UnmarshalBinary(b) + case *any: + switch (*destinationPointer).(type) { + case string: + *destinationPointer = string(b) + + return nil + + case []byte: + *destinationPointer = b - if data != nil { - if str, ok := data.(*string); ok { - *str = string(b) return nil } - } + default: + // check for the underlying type to be pointer to []byte or string, + if ptr := reflect.TypeOf(data); ptr.Kind() != reflect.Ptr { + return errors.New("destination must be a pointer") + } - if t := reflect.TypeOf(data); data != nil && t.Kind() == reflect.Ptr { v := reflect.Indirect(reflect.ValueOf(data)) - if t = v.Type(); t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 { + t := v.Type() + + switch { + case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8: v.SetBytes(b) return nil + + case t.Kind() == reflect.String: + v.SetString(string(b)) + return nil } } @@ -97,67 +130,87 @@ func ByteStreamConsumer(opts ...byteStreamOpt) Consumer { }) } -// ByteStreamProducer creates a producer for byte streams, -// takes a Reader/BinaryMarshaler interface or binary slice, -// and writes to a writer (essentially a pipe) +// ByteStreamProducer creates a producer for byte streams. +// +// The producer takes input data then writes to an output writer (essentially as a pipe). 
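Editor's note: to illustrate the consumer and producer contracts documented in this file, a hedged round-trip sketch using two of the supported destination and source types (*[]byte and []byte); it is not part of the patch.

```go
package main

import (
	"bytes"
	"fmt"
	"log"
	"strings"

	"github.com/go-openapi/runtime"
)

func main() {
	// Consume a stream into a *[]byte destination.
	var raw []byte
	if err := runtime.ByteStreamConsumer().Consume(strings.NewReader("payload"), &raw); err != nil {
		log.Fatal(err)
	}

	// Produce from a []byte source into any io.Writer.
	var out bytes.Buffer
	if err := runtime.ByteStreamProducer().Produce(&out, raw); err != nil {
		log.Fatal(err)
	}
	fmt.Println(out.String()) // payload
}
```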
+// +// Supported input underlying types and interfaces, prioritized in this order: +// - io.WriterTo (for maximum control) +// - io.Reader (performs io.Copy). A ReadCloser is closed before exiting. +// - encoding.BinaryMarshaler +// - error (writes as a string) +// - []byte +// - string +// - struct, other slices: writes as JSON func ByteStreamProducer(opts ...byteStreamOpt) Producer { var vals byteStreamOpts for _, opt := range opts { opt(&vals) } + return ProducerFunc(func(writer io.Writer, data interface{}) error { if writer == nil { return errors.New("ByteStreamProducer requires a writer") // early exit } - close := defaultCloser + if data == nil { + return errors.New("nil data for ByteStreamProducer") + } + + closer := defaultCloser if vals.Close { - if cl, ok := writer.(io.Closer); ok { - close = cl.Close + if cl, isWriterCloser := writer.(io.Closer); isWriterCloser { + closer = cl.Close } } - //nolint:errcheck // TODO: closing a writer would fail. - defer close() + defer func() { + _ = closer() + }() - if rc, ok := data.(io.ReadCloser); ok { + if rc, isDataCloser := data.(io.ReadCloser); isDataCloser { defer rc.Close() } - if rdr, ok := data.(io.Reader); ok { - _, err := io.Copy(writer, rdr) + switch origin := data.(type) { + case io.WriterTo: + _, err := origin.WriteTo(writer) return err - } - if bm, ok := data.(encoding.BinaryMarshaler); ok { - bytes, err := bm.MarshalBinary() + case io.Reader: + _, err := io.Copy(writer, origin) + return err + + case encoding.BinaryMarshaler: + bytes, err := origin.MarshalBinary() if err != nil { return err } _, err = writer.Write(bytes) return err - } - if data != nil { - if str, ok := data.(string); ok { - _, err := writer.Write([]byte(str)) - return err - } - - if e, ok := data.(error); ok { - _, err := writer.Write([]byte(e.Error())) - return err - } + case error: + _, err := writer.Write([]byte(origin.Error())) + return err + default: v := reflect.Indirect(reflect.ValueOf(data)) - if t := v.Type(); t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 { + t := v.Type() + + switch { + case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8: _, err := writer.Write(v.Bytes()) return err - } - if t := v.Type(); t.Kind() == reflect.Struct || t.Kind() == reflect.Slice { + + case t.Kind() == reflect.String: + _, err := writer.Write([]byte(v.String())) + return err + + case t.Kind() == reflect.Struct || t.Kind() == reflect.Slice: b, err := swag.WriteJSON(data) if err != nil { return err } + _, err = writer.Write(b) return err } diff --git a/vendor/github.com/go-openapi/runtime/client_operation.go b/vendor/github.com/go-openapi/runtime/client_operation.go index fa21eacf3..5a5d63563 100644 --- a/vendor/github.com/go-openapi/runtime/client_operation.go +++ b/vendor/github.com/go-openapi/runtime/client_operation.go @@ -30,12 +30,12 @@ type ClientOperation struct { AuthInfo ClientAuthInfoWriter Params ClientRequestWriter Reader ClientResponseReader - Context context.Context + Context context.Context //nolint:containedctx // we precisely want this type to contain the request context Client *http.Client } // A ClientTransport implementor knows how to submit Request objects to some destination type ClientTransport interface { - //Submit(string, RequestWriter, ResponseReader, AuthInfoWriter) (interface{}, error) + // Submit(string, RequestWriter, ResponseReader, AuthInfoWriter) (interface{}, error) Submit(*ClientOperation) (interface{}, error) } diff --git a/vendor/github.com/go-openapi/runtime/client_request.go 
b/vendor/github.com/go-openapi/runtime/client_request.go index d4d2b58f2..4ebb2deab 100644 --- a/vendor/github.com/go-openapi/runtime/client_request.go +++ b/vendor/github.com/go-openapi/runtime/client_request.go @@ -37,8 +37,8 @@ type ClientRequestWriter interface { } // ClientRequest is an interface for things that know how to -// add information to a swagger client request -type ClientRequest interface { +// add information to a swagger client request. +type ClientRequest interface { //nolint:interfacebloat // a swagger-capable request is quite rich, hence the many getter/setters SetHeaderParam(string, ...string) error GetHeaderParams() http.Header diff --git a/vendor/github.com/go-openapi/runtime/csv.go b/vendor/github.com/go-openapi/runtime/csv.go index d807bd915..c9597bcd6 100644 --- a/vendor/github.com/go-openapi/runtime/csv.go +++ b/vendor/github.com/go-openapi/runtime/csv.go @@ -16,62 +16,335 @@ import ( "bytes" + "context" + "encoding" "encoding/csv" "errors" + "fmt" "io" + "reflect" + + "golang.org/x/sync/errgroup" ) -// CSVConsumer creates a new CSV consumer -func CSVConsumer() Consumer { +// CSVConsumer creates a new CSV consumer. +// +// The consumer consumes CSV records from a provided reader into the data passed by reference. +// +// CSVOpts options may be specified to alter the default CSV behavior on the reader and the writer side (e.g. separator, skip header, ...). +// The defaults are those of the standard library's csv.Reader and csv.Writer. +// +// Supported output underlying types and interfaces, prioritized in this order: +// - *csv.Writer +// - CSVWriter (writer options are ignored) +// - io.Writer (as raw bytes) +// - io.ReaderFrom (as raw bytes) +// - encoding.BinaryUnmarshaler (as raw bytes) +// - *[][]string (as a collection of records) +// - *[]byte (as raw bytes) +// - *string (a raw bytes) +// +// The consumer prioritizes situations where buffering the input is not required. 
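Editor's note: a hedged sketch of the CSV consumer (and its counterpart producer defined below) together with the new CSVOpt options; the record contents and the skipped header line are illustrative only.

```go
package main

import (
	"bytes"
	"fmt"
	"log"
	"strings"

	"github.com/go-openapi/runtime"
)

func main() {
	input := "name,age\nalice,30\nbob,25\n"

	// Consume into a *[][]string destination, skipping the header line.
	var records [][]string
	consumer := runtime.CSVConsumer(runtime.WithCSVSkipLines(1))
	if err := consumer.Consume(strings.NewReader(input), &records); err != nil {
		log.Fatal(err)
	}

	// Produce the records back out as CSV into any io.Writer.
	var out bytes.Buffer
	if err := runtime.CSVProducer().Produce(&out, records); err != nil {
		log.Fatal(err)
	}
	fmt.Print(out.String())
}
```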
+func CSVConsumer(opts ...CSVOpt) Consumer { + o := csvOptsWithDefaults(opts) + return ConsumerFunc(func(reader io.Reader, data interface{}) error { if reader == nil { return errors.New("CSVConsumer requires a reader") } + if data == nil { + return errors.New("nil destination for CSVConsumer") + } csvReader := csv.NewReader(reader) - writer, ok := data.(io.Writer) - if !ok { - return errors.New("data type must be io.Writer") - } - csvWriter := csv.NewWriter(writer) - records, err := csvReader.ReadAll() - if err != nil { - return err - } - for _, r := range records { - if err := csvWriter.Write(r); err != nil { - return err + o.applyToReader(csvReader) + closer := defaultCloser + if o.closeStream { + if cl, isReaderCloser := reader.(io.Closer); isReaderCloser { + closer = cl.Close + } + } + defer func() { + _ = closer() + }() + + switch destination := data.(type) { + case *csv.Writer: + csvWriter := destination + o.applyToWriter(csvWriter) + + return pipeCSV(csvWriter, csvReader, o) + + case CSVWriter: + csvWriter := destination + // no writer options available + + return pipeCSV(csvWriter, csvReader, o) + + case io.Writer: + csvWriter := csv.NewWriter(destination) + o.applyToWriter(csvWriter) + + return pipeCSV(csvWriter, csvReader, o) + + case io.ReaderFrom: + var buf bytes.Buffer + csvWriter := csv.NewWriter(&buf) + o.applyToWriter(csvWriter) + if err := bufferedCSV(csvWriter, csvReader, o); err != nil { + return err + } + _, err := destination.ReadFrom(&buf) + + return err + + case encoding.BinaryUnmarshaler: + var buf bytes.Buffer + csvWriter := csv.NewWriter(&buf) + o.applyToWriter(csvWriter) + if err := bufferedCSV(csvWriter, csvReader, o); err != nil { + return err + } + + return destination.UnmarshalBinary(buf.Bytes()) + + default: + // support *[][]string, *[]byte, *string + if ptr := reflect.TypeOf(data); ptr.Kind() != reflect.Ptr { + return errors.New("destination must be a pointer") + } + + v := reflect.Indirect(reflect.ValueOf(data)) + t := v.Type() + + switch { + case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Slice && t.Elem().Elem().Kind() == reflect.String: + csvWriter := &csvRecordsWriter{} + // writer options are ignored + if err := pipeCSV(csvWriter, csvReader, o); err != nil { + return err + } + + v.Grow(len(csvWriter.records)) + v.SetCap(len(csvWriter.records)) // in case Grow was unnessary, trim down the capacity + v.SetLen(len(csvWriter.records)) + reflect.Copy(v, reflect.ValueOf(csvWriter.records)) + + return nil + + case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8: + var buf bytes.Buffer + csvWriter := csv.NewWriter(&buf) + o.applyToWriter(csvWriter) + if err := bufferedCSV(csvWriter, csvReader, o); err != nil { + return err + } + v.SetBytes(buf.Bytes()) + + return nil + + case t.Kind() == reflect.String: + var buf bytes.Buffer + csvWriter := csv.NewWriter(&buf) + o.applyToWriter(csvWriter) + if err := bufferedCSV(csvWriter, csvReader, o); err != nil { + return err + } + v.SetString(buf.String()) + + return nil + + default: + return fmt.Errorf("%v (%T) is not supported by the CSVConsumer, %s", + data, data, "can be resolved by supporting CSVWriter/Writer/BinaryUnmarshaler interface", + ) } } - csvWriter.Flush() - return nil }) } -// CSVProducer creates a new CSV producer -func CSVProducer() Producer { +// CSVProducer creates a new CSV producer. +// +// The producer takes input data then writes as CSV to an output writer (essentially as a pipe). 
+// +// Supported input underlying types and interfaces, prioritized in this order: +// - *csv.Reader +// - CSVReader (reader options are ignored) +// - io.Reader +// - io.WriterTo +// - encoding.BinaryMarshaler +// - [][]string +// - []byte +// - string +// +// The producer prioritizes situations where buffering the input is not required. +func CSVProducer(opts ...CSVOpt) Producer { + o := csvOptsWithDefaults(opts) + return ProducerFunc(func(writer io.Writer, data interface{}) error { if writer == nil { return errors.New("CSVProducer requires a writer") } - - dataBytes, ok := data.([]byte) - if !ok { - return errors.New("data type must be byte array") + if data == nil { + return errors.New("nil data for CSVProducer") } - csvReader := csv.NewReader(bytes.NewBuffer(dataBytes)) - records, err := csvReader.ReadAll() - if err != nil { - return err - } csvWriter := csv.NewWriter(writer) - for _, r := range records { - if err := csvWriter.Write(r); err != nil { - return err + o.applyToWriter(csvWriter) + closer := defaultCloser + if o.closeStream { + if cl, isWriterCloser := writer.(io.Closer); isWriterCloser { + closer = cl.Close + } + } + defer func() { + _ = closer() + }() + + if rc, isDataCloser := data.(io.ReadCloser); isDataCloser { + defer rc.Close() + } + + switch origin := data.(type) { + case *csv.Reader: + csvReader := origin + o.applyToReader(csvReader) + + return pipeCSV(csvWriter, csvReader, o) + + case CSVReader: + csvReader := origin + // no reader options available + + return pipeCSV(csvWriter, csvReader, o) + + case io.Reader: + csvReader := csv.NewReader(origin) + o.applyToReader(csvReader) + + return pipeCSV(csvWriter, csvReader, o) + + case io.WriterTo: + // async piping of the writes performed by WriteTo + r, w := io.Pipe() + csvReader := csv.NewReader(r) + o.applyToReader(csvReader) + + pipe, _ := errgroup.WithContext(context.Background()) + pipe.Go(func() error { + _, err := origin.WriteTo(w) + _ = w.Close() + return err + }) + + pipe.Go(func() error { + defer func() { + _ = r.Close() + }() + + return pipeCSV(csvWriter, csvReader, o) + }) + + return pipe.Wait() + + case encoding.BinaryMarshaler: + buf, err := origin.MarshalBinary() + if err != nil { + return err + } + rdr := bytes.NewBuffer(buf) + csvReader := csv.NewReader(rdr) + + return bufferedCSV(csvWriter, csvReader, o) + + default: + // support [][]string, []byte, string (or pointers to those) + v := reflect.Indirect(reflect.ValueOf(data)) + t := v.Type() + + switch { + case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Slice && t.Elem().Elem().Kind() == reflect.String: + csvReader := &csvRecordsWriter{ + records: make([][]string, v.Len()), + } + reflect.Copy(reflect.ValueOf(csvReader.records), v) + + return pipeCSV(csvWriter, csvReader, o) + + case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8: + buf := bytes.NewBuffer(v.Bytes()) + csvReader := csv.NewReader(buf) + o.applyToReader(csvReader) + + return bufferedCSV(csvWriter, csvReader, o) + + case t.Kind() == reflect.String: + buf := bytes.NewBufferString(v.String()) + csvReader := csv.NewReader(buf) + o.applyToReader(csvReader) + + return bufferedCSV(csvWriter, csvReader, o) + + default: + return fmt.Errorf("%v (%T) is not supported by the CSVProducer, %s", + data, data, "can be resolved by supporting CSVReader/Reader/BinaryMarshaler interface", + ) } } - csvWriter.Flush() - return nil }) } + +// pipeCSV copies CSV records from a CSV reader to a CSV writer +func pipeCSV(csvWriter CSVWriter, csvReader CSVReader, opts csvOpts) error { + 
for ; opts.skippedLines > 0; opts.skippedLines-- { + _, err := csvReader.Read() + if err != nil { + if errors.Is(err, io.EOF) { + return nil + } + + return err + } + } + + for { + record, err := csvReader.Read() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + + return err + } + + if err := csvWriter.Write(record); err != nil { + return err + } + } + + csvWriter.Flush() + + return csvWriter.Error() +} + +// bufferedCSV copies CSV records from a CSV reader to a CSV writer, +// by first reading all records then writing them at once. +func bufferedCSV(csvWriter *csv.Writer, csvReader *csv.Reader, opts csvOpts) error { + for ; opts.skippedLines > 0; opts.skippedLines-- { + _, err := csvReader.Read() + if err != nil { + if errors.Is(err, io.EOF) { + return nil + } + + return err + } + } + + records, err := csvReader.ReadAll() + if err != nil { + return err + } + + return csvWriter.WriteAll(records) +} diff --git a/vendor/github.com/go-openapi/runtime/csv_options.go b/vendor/github.com/go-openapi/runtime/csv_options.go new file mode 100644 index 000000000..c16464c57 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/csv_options.go @@ -0,0 +1,121 @@ +package runtime + +import ( + "encoding/csv" + "io" +) + +// CSVOpts alter the behavior of the CSV consumer or producer. +type CSVOpt func(*csvOpts) + +type csvOpts struct { + csvReader csv.Reader + csvWriter csv.Writer + skippedLines int + closeStream bool +} + +// WithCSVReaderOpts specifies the options to csv.Reader +// when reading CSV. +func WithCSVReaderOpts(reader csv.Reader) CSVOpt { + return func(o *csvOpts) { + o.csvReader = reader + } +} + +// WithCSVWriterOpts specifies the options to csv.Writer +// when writing CSV. +func WithCSVWriterOpts(writer csv.Writer) CSVOpt { + return func(o *csvOpts) { + o.csvWriter = writer + } +} + +// WithCSVSkipLines will skip header lines. 
+func WithCSVSkipLines(skipped int) CSVOpt { + return func(o *csvOpts) { + o.skippedLines = skipped + } +} + +func WithCSVClosesStream() CSVOpt { + return func(o *csvOpts) { + o.closeStream = true + } +} + +func (o csvOpts) applyToReader(in *csv.Reader) { + if o.csvReader.Comma != 0 { + in.Comma = o.csvReader.Comma + } + if o.csvReader.Comment != 0 { + in.Comment = o.csvReader.Comment + } + if o.csvReader.FieldsPerRecord != 0 { + in.FieldsPerRecord = o.csvReader.FieldsPerRecord + } + + in.LazyQuotes = o.csvReader.LazyQuotes + in.TrimLeadingSpace = o.csvReader.TrimLeadingSpace + in.ReuseRecord = o.csvReader.ReuseRecord +} + +func (o csvOpts) applyToWriter(in *csv.Writer) { + if o.csvWriter.Comma != 0 { + in.Comma = o.csvWriter.Comma + } + in.UseCRLF = o.csvWriter.UseCRLF +} + +func csvOptsWithDefaults(opts []CSVOpt) csvOpts { + var o csvOpts + for _, apply := range opts { + apply(&o) + } + + return o +} + +type CSVWriter interface { + Write([]string) error + Flush() + Error() error +} + +type CSVReader interface { + Read() ([]string, error) +} + +var ( + _ CSVWriter = &csvRecordsWriter{} + _ CSVReader = &csvRecordsWriter{} +) + +// csvRecordsWriter is an internal container to move CSV records back and forth +type csvRecordsWriter struct { + i int + records [][]string +} + +func (w *csvRecordsWriter) Write(record []string) error { + w.records = append(w.records, record) + + return nil +} + +func (w *csvRecordsWriter) Read() ([]string, error) { + if w.i >= len(w.records) { + return nil, io.EOF + } + defer func() { + w.i++ + }() + + return w.records[w.i], nil +} + +func (w *csvRecordsWriter) Flush() {} + +func (w *csvRecordsWriter) Error() error { + return nil +} diff --git a/vendor/github.com/go-openapi/runtime/logger/standard.go b/vendor/github.com/go-openapi/runtime/logger/standard.go index f7e67ebb9..30035a777 100644 --- a/vendor/github.com/go-openapi/runtime/logger/standard.go +++ b/vendor/github.com/go-openapi/runtime/logger/standard.go @@ -5,6 +5,8 @@ "os" ) +var _ Logger = StandardLogger{} + type StandardLogger struct{} func (StandardLogger) Printf(format string, args ...interface{}) { diff --git a/vendor/github.com/go-openapi/runtime/middleware/context.go b/vendor/github.com/go-openapi/runtime/middleware/context.go index d21ae4e87..44cecf118 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/context.go +++ b/vendor/github.com/go-openapi/runtime/middleware/context.go @@ -18,6 +18,8 @@ stdContext "context" "fmt" "net/http" + "net/url" + "path" "strings" "sync" @@ -35,12 +37,21 @@ // Debug when true turns on verbose logging var Debug = logger.DebugEnabled() + +// Logger is the standard libray logger used for printing debug messages var Logger logger.Logger = logger.StandardLogger{} -func debugLog(format string, args ...interface{}) { - if Debug { - Logger.Printf(format, args...) 
+func debugLogfFunc(lg logger.Logger) func(string, ...any) { + if logger.DebugEnabled() { + if lg == nil { + return Logger.Debugf + } + + return lg.Debugf } + + // muted logger + return func(_ string, _ ...any) {} } // A Builder can create middlewares @@ -73,10 +84,11 @@ func (fn ResponderFunc) WriteResponse(rw http.ResponseWriter, pr runtime.Produce // used throughout to store request context with the standard context attached // to the http.Request type Context struct { - spec *loads.Document - analyzer *analysis.Spec - api RoutableAPI - router Router + spec *loads.Document + analyzer *analysis.Spec + api RoutableAPI + router Router + debugLogf func(string, ...any) // a logging function to debug context and all components using it } type routableUntypedAPI struct { @@ -162,7 +174,7 @@ func (r *routableUntypedAPI) HandlerFor(method, path string) (http.Handler, bool r.hlock.Unlock() return handler, ok } -func (r *routableUntypedAPI) ServeErrorFor(operationID string) func(http.ResponseWriter, *http.Request, error) { +func (r *routableUntypedAPI) ServeErrorFor(_ string) func(http.ResponseWriter, *http.Request, error) { return r.api.ServeError } func (r *routableUntypedAPI) ConsumersFor(mediaTypes []string) map[string]runtime.Consumer { @@ -189,7 +201,9 @@ func (r *routableUntypedAPI) DefaultConsumes() string { return r.defaultConsumes } -// NewRoutableContext creates a new context for a routable API +// NewRoutableContext creates a new context for a routable API. +// +// If a nil Router is provided, the DefaultRouter (denco-based) will be used. func NewRoutableContext(spec *loads.Document, routableAPI RoutableAPI, routes Router) *Context { var an *analysis.Spec if spec != nil { @@ -199,26 +213,40 @@ func NewRoutableContext(spec *loads.Document, routableAPI RoutableAPI, routes Ro return NewRoutableContextWithAnalyzedSpec(spec, an, routableAPI, routes) } -// NewRoutableContextWithAnalyzedSpec is like NewRoutableContext but takes in input the analysed spec too +// NewRoutableContextWithAnalyzedSpec is like NewRoutableContext but takes as input an already analysed spec. +// +// If a nil Router is provided, the DefaultRouter (denco-based) will be used. func NewRoutableContextWithAnalyzedSpec(spec *loads.Document, an *analysis.Spec, routableAPI RoutableAPI, routes Router) *Context { // Either there are no spec doc and analysis, or both of them. if !((spec == nil && an == nil) || (spec != nil && an != nil)) { panic(errors.New(http.StatusInternalServerError, "routable context requires either both spec doc and analysis, or none of them")) } - ctx := &Context{spec: spec, api: routableAPI, analyzer: an, router: routes} - return ctx + return &Context{ + spec: spec, + api: routableAPI, + analyzer: an, + router: routes, + debugLogf: debugLogfFunc(nil), + } } -// NewContext creates a new context wrapper +// NewContext creates a new context wrapper. +// +// If a nil Router is provided, the DefaultRouter (denco-based) will be used. func NewContext(spec *loads.Document, api *untyped.API, routes Router) *Context { var an *analysis.Spec if spec != nil { an = analysis.New(spec.Spec()) } - ctx := &Context{spec: spec, analyzer: an} + ctx := &Context{ + spec: spec, + analyzer: an, + router: routes, + debugLogf: debugLogfFunc(nil), + } ctx.api = newRoutableUntypedAPI(spec, api, ctx) - ctx.router = routes + return ctx } @@ -282,6 +310,13 @@ func (c *Context) BasePath() string { return c.spec.BasePath() } +// SetLogger allows for injecting a logger to catch debug entries. 
+// +// The logger is enabled in DEBUG mode only. +func (c *Context) SetLogger(lg logger.Logger) { + c.debugLogf = debugLogfFunc(lg) +} + // RequiredProduces returns the accepted content types for responses func (c *Context) RequiredProduces() []string { return c.analyzer.RequiredProduces() @@ -299,6 +334,7 @@ func (c *Context) BindValidRequest(request *http.Request, route *MatchedRoute, b if err != nil { res = append(res, err) } else { + c.debugLogf("validating content type for %q against [%s]", ct, strings.Join(route.Consumes, ", ")) if err := validateContentType(route.Consumes, ct); err != nil { res = append(res, err) } @@ -397,16 +433,16 @@ func (c *Context) ResponseFormat(r *http.Request, offers []string) (string, *htt var rCtx = r.Context() if v, ok := rCtx.Value(ctxResponseFormat).(string); ok { - debugLog("[%s %s] found response format %q in context", r.Method, r.URL.Path, v) + c.debugLogf("[%s %s] found response format %q in context", r.Method, r.URL.Path, v) return v, r } format := NegotiateContentType(r, offers, "") if format != "" { - debugLog("[%s %s] set response format %q in context", r.Method, r.URL.Path, format) + c.debugLogf("[%s %s] set response format %q in context", r.Method, r.URL.Path, format) r = r.WithContext(stdContext.WithValue(rCtx, ctxResponseFormat, format)) } - debugLog("[%s %s] negotiated response format %q", r.Method, r.URL.Path, format) + c.debugLogf("[%s %s] negotiated response format %q", r.Method, r.URL.Path, format) return format, r } @@ -469,7 +505,7 @@ func (c *Context) BindAndValidate(request *http.Request, matched *MatchedRoute) var rCtx = request.Context() if v, ok := rCtx.Value(ctxBoundParams).(*validation); ok { - debugLog("got cached validation (valid: %t)", len(v.result) == 0) + c.debugLogf("got cached validation (valid: %t)", len(v.result) == 0) if len(v.result) > 0 { return v.bound, request, errors.CompositeValidationError(v.result...) } @@ -481,7 +517,7 @@ func (c *Context) BindAndValidate(request *http.Request, matched *MatchedRoute) if len(result.result) > 0 { return result.bound, request, errors.CompositeValidationError(result.result...) 
} - debugLog("no validation errors found") + c.debugLogf("no validation errors found") return result.bound, request, nil } @@ -492,7 +528,7 @@ func (c *Context) NotFound(rw http.ResponseWriter, r *http.Request) { // Respond renders the response after doing some content negotiation func (c *Context) Respond(rw http.ResponseWriter, r *http.Request, produces []string, route *MatchedRoute, data interface{}) { - debugLog("responding to %s %s with produces: %v", r.Method, r.URL.Path, produces) + c.debugLogf("responding to %s %s with produces: %v", r.Method, r.URL.Path, produces) offers := []string{} for _, mt := range produces { if mt != c.api.DefaultProduces() { @@ -501,7 +537,7 @@ func (c *Context) Respond(rw http.ResponseWriter, r *http.Request, produces []st } // the default producer is last so more specific producers take precedence offers = append(offers, c.api.DefaultProduces()) - debugLog("offers: %v", offers) + c.debugLogf("offers: %v", offers) var format string format, r = c.ResponseFormat(r, offers) @@ -516,7 +552,7 @@ func (c *Context) Respond(rw http.ResponseWriter, r *http.Request, produces []st prods := c.api.ProducersFor(normalizeOffers([]string{c.api.DefaultProduces()})) pr, ok := prods[c.api.DefaultProduces()] if !ok { - panic(errors.New(http.StatusInternalServerError, "can't find a producer for "+format)) + panic(errors.New(http.StatusInternalServerError, cantFindProducer(format))) } prod = pr } @@ -542,14 +578,14 @@ func (c *Context) Respond(rw http.ResponseWriter, r *http.Request, produces []st } if route == nil || route.Operation == nil { - rw.WriteHeader(200) - if r.Method == "HEAD" { + rw.WriteHeader(http.StatusOK) + if r.Method == http.MethodHead { return } producers := c.api.ProducersFor(normalizeOffers(offers)) prod, ok := producers[format] if !ok { - panic(errors.New(http.StatusInternalServerError, "can't find a producer for "+format)) + panic(errors.New(http.StatusInternalServerError, cantFindProducer(format))) } if err := prod.Produce(rw, data); err != nil { panic(err) // let the recovery middleware deal with this @@ -559,7 +595,7 @@ func (c *Context) Respond(rw http.ResponseWriter, r *http.Request, produces []st if _, code, ok := route.Operation.SuccessResponse(); ok { rw.WriteHeader(code) - if code == 204 || r.Method == "HEAD" { + if code == http.StatusNoContent || r.Method == http.MethodHead { return } @@ -570,7 +606,7 @@ func (c *Context) Respond(rw http.ResponseWriter, r *http.Request, produces []st prods := c.api.ProducersFor(normalizeOffers([]string{c.api.DefaultProduces()})) pr, ok := prods[c.api.DefaultProduces()] if !ok { - panic(errors.New(http.StatusInternalServerError, "can't find a producer for "+format)) + panic(errors.New(http.StatusInternalServerError, cantFindProducer(format))) } prod = pr } @@ -584,45 +620,92 @@ func (c *Context) Respond(rw http.ResponseWriter, r *http.Request, produces []st c.api.ServeErrorFor(route.Operation.ID)(rw, r, errors.New(http.StatusInternalServerError, "can't produce response")) } -func (c *Context) APIHandlerSwaggerUI(builder Builder) http.Handler { +// APIHandlerSwaggerUI returns a handler to serve the API. +// +// This handler includes a swagger spec, router and the contract defined in the swagger spec. +// +// A spec UI (SwaggerUI) is served at {API base path}/docs and the spec document at /swagger.json +// (these can be modified with uiOptions). 
+func (c *Context) APIHandlerSwaggerUI(builder Builder, opts ...UIOption) http.Handler { b := builder if b == nil { b = PassthroughBuilder } - var title string - sp := c.spec.Spec() - if sp != nil && sp.Info != nil && sp.Info.Title != "" { - title = sp.Info.Title - } + specPath, uiOpts, specOpts := c.uiOptionsForHandler(opts) + var swaggerUIOpts SwaggerUIOpts + fromCommonToAnyOptions(uiOpts, &swaggerUIOpts) - swaggerUIOpts := SwaggerUIOpts{ - BasePath: c.BasePath(), - Title: title, - } - - return Spec("", c.spec.Raw(), SwaggerUI(swaggerUIOpts, c.RoutesHandler(b))) + return Spec(specPath, c.spec.Raw(), SwaggerUI(swaggerUIOpts, c.RoutesHandler(b)), specOpts...) } -// APIHandler returns a handler to serve the API, this includes a swagger spec, router and the contract defined in the swagger spec -func (c *Context) APIHandler(builder Builder) http.Handler { +// APIHandlerRapiDoc returns a handler to serve the API. +// +// This handler includes a swagger spec, router and the contract defined in the swagger spec. +// +// A spec UI (RapiDoc) is served at {API base path}/docs and the spec document at /swagger.json +// (these can be modified with uiOptions). +func (c *Context) APIHandlerRapiDoc(builder Builder, opts ...UIOption) http.Handler { b := builder if b == nil { b = PassthroughBuilder } + specPath, uiOpts, specOpts := c.uiOptionsForHandler(opts) + var rapidocUIOpts RapiDocOpts + fromCommonToAnyOptions(uiOpts, &rapidocUIOpts) + + return Spec(specPath, c.spec.Raw(), RapiDoc(rapidocUIOpts, c.RoutesHandler(b)), specOpts...) +} + +// APIHandler returns a handler to serve the API. +// +// This handler includes a swagger spec, router and the contract defined in the swagger spec. +// +// A spec UI (Redoc) is served at {API base path}/docs and the spec document at /swagger.json +// (these can be modified with uiOptions). +func (c *Context) APIHandler(builder Builder, opts ...UIOption) http.Handler { + b := builder + if b == nil { + b = PassthroughBuilder + } + + specPath, uiOpts, specOpts := c.uiOptionsForHandler(opts) + var redocOpts RedocOpts + fromCommonToAnyOptions(uiOpts, &redocOpts) + + return Spec(specPath, c.spec.Raw(), Redoc(redocOpts, c.RoutesHandler(b)), specOpts...) +} + +func (c Context) uiOptionsForHandler(opts []UIOption) (string, uiOptions, []SpecOption) { var title string sp := c.spec.Spec() if sp != nil && sp.Info != nil && sp.Info.Title != "" { title = sp.Info.Title } - redocOpts := RedocOpts{ - BasePath: c.BasePath(), - Title: title, + // default options (may be overridden) + optsForContext := []UIOption{ + WithUIBasePath(c.BasePath()), + WithUITitle(title), + } + optsForContext = append(optsForContext, opts...) + uiOpts := uiOptionsWithDefaults(optsForContext) + + // If spec URL is provided, there is a non-default path to serve the spec. + // This makes sure that the UI middleware is aligned with the Spec middleware. + u, _ := url.Parse(uiOpts.SpecURL) + var specPath string + if u != nil { + specPath = u.Path } - return Spec("", c.spec.Raw(), Redoc(redocOpts, c.RoutesHandler(b))) + pth, doc := path.Split(specPath) + if pth == "." 
{ + pth = "" + } + + return pth, uiOpts, []SpecOption{WithSpecDocument(doc)} } // RoutesHandler returns a handler to serve the API, just the routes and the contract defined in the swagger spec @@ -633,3 +716,7 @@ func (c *Context) RoutesHandler(builder Builder) http.Handler { } return NewRouter(c, b(NewOperationExecutor(c))) } + +func cantFindProducer(format string) string { + return "can't find a producer for " + format +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/router.go b/vendor/github.com/go-openapi/runtime/middleware/denco/router.go index 5d2691ec3..4377f77a4 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/denco/router.go +++ b/vendor/github.com/go-openapi/runtime/middleware/denco/router.go @@ -2,6 +2,7 @@ package denco import ( + "errors" "fmt" "sort" "strings" @@ -29,13 +30,13 @@ // Router represents a URL router. type Router struct { + param *doubleArray // SizeHint expects the maximum number of path parameters in records to Build. // SizeHint will be used to determine the capacity of the memory to allocate. // By default, SizeHint will be determined from given records to Build. SizeHint int static map[string]interface{} - param *doubleArray } // New returns a new Router. @@ -51,7 +52,7 @@ func New() *Router { // params is a slice of the Param that arranged in the order in which parameters appeared. // e.g. when built routing path is "/path/to/:id/:name" and given path is "/path/to/1/alice". params order is [{"id": "1"}, {"name": "alice"}], not [{"name": "alice"}, {"id": "1"}]. func (rt *Router) Lookup(path string) (data interface{}, params Params, found bool) { - if data, found := rt.static[path]; found { + if data, found = rt.static[path]; found { return data, nil, true } if len(rt.param.node) == 1 { @@ -71,7 +72,7 @@ func (rt *Router) Lookup(path string) (data interface{}, params Params, found bo func (rt *Router) Build(records []Record) error { statics, params := makeRecords(records) if len(params) > MaxSize { - return fmt.Errorf("denco: too many records") + return errors.New("denco: too many records") } if rt.SizeHint < 0 { rt.SizeHint = 0 @@ -131,7 +132,8 @@ func newDoubleArray() *doubleArray { // baseCheck contains BASE, CHECK and Extra flags. // From the top, 22bits of BASE, 2bits of Extra flags and 8bits of CHECK. 
// -// BASE (22bit) | Extra flags (2bit) | CHECK (8bit) +// BASE (22bit) | Extra flags (2bit) | CHECK (8bit) +// // |----------------------|--|--------| // 32 10 8 0 type baseCheck uint32 @@ -196,24 +198,29 @@ func (da *doubleArray) lookup(path string, params []Param, idx int) (*node, []Pa if next := nextIndex(da.bc[idx].Base(), TerminationCharacter); next < len(da.bc) && da.bc[next].Check() == TerminationCharacter { return da.node[da.bc[next].Base()], params, true } + BACKTRACKING: for j := len(indices) - 1; j >= 0; j-- { i, idx := int(indices[j]>>32), int(indices[j]&0xffffffff) if da.bc[idx].IsSingleParam() { - idx := nextIndex(da.bc[idx].Base(), ParamCharacter) - if idx >= len(da.bc) { + nextIdx := nextIndex(da.bc[idx].Base(), ParamCharacter) + if nextIdx >= len(da.bc) { break } + next := NextSeparator(path, i) - params := append(params, Param{Value: path[i:next]}) - if nd, params, found := da.lookup(path[next:], params, idx); found { - return nd, params, true + nextParams := params + nextParams = append(nextParams, Param{Value: path[i:next]}) + if nd, nextNextParams, found := da.lookup(path[next:], nextParams, nextIdx); found { + return nd, nextNextParams, true } } + if da.bc[idx].IsWildcardParam() { - idx := nextIndex(da.bc[idx].Base(), WildcardCharacter) - params := append(params, Param{Value: path[i:]}) - return da.node[da.bc[idx].Base()], params, true + nextIdx := nextIndex(da.bc[idx].Base(), WildcardCharacter) + nextParams := params + nextParams = append(nextParams, Param{Value: path[i:]}) + return da.node[da.bc[nextIdx].Base()], nextParams, true } } return nil, nil, false @@ -325,7 +332,7 @@ func (da *doubleArray) arrange(records []*record, idx, depth int, usedBase map[i } base = da.findBase(siblings, idx, usedBase) if base > MaxSize { - return -1, nil, nil, fmt.Errorf("denco: too many elements of internal slice") + return -1, nil, nil, errors.New("denco: too many elements of internal slice") } da.setBase(idx, base) return base, siblings, leaf, err @@ -386,7 +393,7 @@ func makeSiblings(records []*record, depth int) (sib []sibling, leaf *record, er case pc == c: continue default: - return nil, nil, fmt.Errorf("denco: BUG: routing table hasn't been sorted") + return nil, nil, errors.New("denco: BUG: routing table hasn't been sorted") } if n > 0 { sib[n-1].end = i @@ -431,7 +438,7 @@ func makeRecords(srcs []Record) (statics, params []*record) { wildcardPrefix := string(SeparatorCharacter) + string(WildcardCharacter) restconfPrefix := string(PathParamCharacter) + string(ParamCharacter) for _, r := range srcs { - if strings.Contains(r.Key, paramPrefix) || strings.Contains(r.Key, wildcardPrefix) ||strings.Contains(r.Key, restconfPrefix){ + if strings.Contains(r.Key, paramPrefix) || strings.Contains(r.Key, wildcardPrefix) || strings.Contains(r.Key, restconfPrefix) { r.Key += termChar params = append(params, &record{Record: r}) } else { diff --git a/vendor/github.com/go-openapi/runtime/middleware/doc.go b/vendor/github.com/go-openapi/runtime/middleware/doc.go index eaf90606a..836a98850 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/doc.go +++ b/vendor/github.com/go-openapi/runtime/middleware/doc.go @@ -12,51 +12,52 @@ // See the License for the specific language governing permissions and // limitations under the License. -/*Package middleware provides the library with helper functions for serving swagger APIs. +/* +Package middleware provides the library with helper functions for serving swagger APIs. 
Pseudo middleware handler - import ( - "net/http" + import ( + "net/http" - "github.com/go-openapi/errors" - ) + "github.com/go-openapi/errors" + ) - func newCompleteMiddleware(ctx *Context) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - // use context to lookup routes - if matched, ok := ctx.RouteInfo(r); ok { + func newCompleteMiddleware(ctx *Context) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + // use context to lookup routes + if matched, ok := ctx.RouteInfo(r); ok { - if matched.NeedsAuth() { - if _, err := ctx.Authorize(r, matched); err != nil { - ctx.Respond(rw, r, matched.Produces, matched, err) - return - } - } + if matched.NeedsAuth() { + if _, err := ctx.Authorize(r, matched); err != nil { + ctx.Respond(rw, r, matched.Produces, matched, err) + return + } + } - bound, validation := ctx.BindAndValidate(r, matched) - if validation != nil { - ctx.Respond(rw, r, matched.Produces, matched, validation) - return - } + bound, validation := ctx.BindAndValidate(r, matched) + if validation != nil { + ctx.Respond(rw, r, matched.Produces, matched, validation) + return + } - result, err := matched.Handler.Handle(bound) - if err != nil { - ctx.Respond(rw, r, matched.Produces, matched, err) - return - } + result, err := matched.Handler.Handle(bound) + if err != nil { + ctx.Respond(rw, r, matched.Produces, matched, err) + return + } - ctx.Respond(rw, r, matched.Produces, matched, result) - return - } + ctx.Respond(rw, r, matched.Produces, matched, result) + return + } - // Not found, check if it exists in the other methods first - if others := ctx.AllowedMethods(r); len(others) > 0 { - ctx.Respond(rw, r, ctx.spec.RequiredProduces(), nil, errors.MethodNotAllowed(r.Method, others)) - return - } - ctx.Respond(rw, r, ctx.spec.RequiredProduces(), nil, errors.NotFound("path %s was not found", r.URL.Path)) - }) - } + // Not found, check if it exists in the other methods first + if others := ctx.AllowedMethods(r); len(others) > 0 { + ctx.Respond(rw, r, ctx.spec.RequiredProduces(), nil, errors.MethodNotAllowed(r.Method, others)) + return + } + ctx.Respond(rw, r, ctx.spec.RequiredProduces(), nil, errors.NotFound("path %s was not found", r.URL.Path)) + }) + } */ package middleware diff --git a/vendor/github.com/go-openapi/runtime/middleware/go18.go b/vendor/github.com/go-openapi/runtime/middleware/go18.go deleted file mode 100644 index 75c762c09..000000000 --- a/vendor/github.com/go-openapi/runtime/middleware/go18.go +++ /dev/null @@ -1,9 +0,0 @@ -// +build go1.8 - -package middleware - -import "net/url" - -func pathUnescape(path string) (string, error) { - return url.PathUnescape(path) -} diff --git a/vendor/github.com/go-openapi/runtime/middleware/header/header.go b/vendor/github.com/go-openapi/runtime/middleware/header/header.go index e069743e3..df073c87d 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/header/header.go +++ b/vendor/github.com/go-openapi/runtime/middleware/header/header.go @@ -195,7 +195,8 @@ func ParseAccept2(header http.Header, key string) (specs []AcceptSpec) { } // ParseAccept parses Accept* headers. 
-func ParseAccept(header http.Header, key string) (specs []AcceptSpec) { +func ParseAccept(header http.Header, key string) []AcceptSpec { + var specs []AcceptSpec loop: for _, s := range header[key] { for { @@ -218,6 +219,7 @@ func ParseAccept(header http.Header, key string) (specs []AcceptSpec) { } } } + specs = append(specs, spec) s = skipSpace(s) if !strings.HasPrefix(s, ",") { @@ -226,7 +228,8 @@ func ParseAccept(header http.Header, key string) (specs []AcceptSpec) { s = skipSpace(s[1:]) } } - return + + return specs } func skipSpace(s string) (rest string) { @@ -306,7 +309,7 @@ func expectTokenOrQuoted(s string) (value string, rest string) { p := make([]byte, len(s)-1) j := copy(p, s[:i]) escape := true - for i = i + 1; i < len(s); i++ { + for i++; i < len(s); i++ { b := s[i] switch { case escape: diff --git a/vendor/github.com/go-openapi/runtime/middleware/parameter.go b/vendor/github.com/go-openapi/runtime/middleware/parameter.go index 9aaf65958..9c3353a95 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/parameter.go +++ b/vendor/github.com/go-openapi/runtime/middleware/parameter.go @@ -34,6 +34,11 @@ const defaultMaxMemory = 32 << 20 +const ( + typeString = "string" + typeArray = "array" +) + var textUnmarshalType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem() func newUntypedParamBinder(param spec.Parameter, spec *spec.Swagger, formats strfmt.Registry) *untypedParamBinder { @@ -66,7 +71,7 @@ func (p *untypedParamBinder) typeForSchema(tpe, format string, items *spec.Items case "boolean": return reflect.TypeOf(true) - case "string": + case typeString: if tt, ok := p.formats.GetType(format); ok { return tt } @@ -94,7 +99,7 @@ func (p *untypedParamBinder) typeForSchema(tpe, format string, items *spec.Items return reflect.TypeOf(float64(0)) } - case "array": + case typeArray: if items == nil { return nil } @@ -119,7 +124,7 @@ func (p *untypedParamBinder) allowsMulti() bool { func (p *untypedParamBinder) readValue(values runtime.Gettable, target reflect.Value) ([]string, bool, bool, error) { name, in, cf, tpe := p.parameter.Name, p.parameter.In, p.parameter.CollectionFormat, p.parameter.Type - if tpe == "array" { + if tpe == typeArray { if cf == "multi" { if !p.allowsMulti() { return nil, false, false, errors.InvalidCollectionFormat(name, in, cf) @@ -208,10 +213,11 @@ func (p *untypedParamBinder) Bind(request *http.Request, routeParams RouteParams if ffErr != nil { if p.parameter.Required { return errors.NewParseError(p.Name, p.parameter.In, "", ffErr) - } else { - return nil } + + return nil } + target.Set(reflect.ValueOf(runtime.File{Data: file, Header: header})) return nil } @@ -263,7 +269,7 @@ func (p *untypedParamBinder) Bind(request *http.Request, routeParams RouteParams } func (p *untypedParamBinder) bindValue(data []string, hasKey bool, target reflect.Value) error { - if p.parameter.Type == "array" { + if p.parameter.Type == typeArray { return p.setSliceFieldValue(target, p.parameter.Default, data, hasKey) } var d string @@ -273,7 +279,7 @@ func (p *untypedParamBinder) bindValue(data []string, hasKey bool, target reflec return p.setFieldValue(target, p.parameter.Default, d, hasKey) } -func (p *untypedParamBinder) setFieldValue(target reflect.Value, defaultValue interface{}, data string, hasKey bool) error { +func (p *untypedParamBinder) setFieldValue(target reflect.Value, defaultValue interface{}, data string, hasKey bool) error { //nolint:gocyclo tpe := p.parameter.Type if p.parameter.Format != "" { tpe = p.parameter.Format @@ -317,7 +323,7 @@ func (p 
*untypedParamBinder) setFieldValue(target reflect.Value, defaultValue in return nil } - switch target.Kind() { + switch target.Kind() { //nolint:exhaustive // we want to check only types that map from a swagger parameter case reflect.Bool: if data == "" { if target.CanSet() { diff --git a/vendor/github.com/go-openapi/runtime/middleware/pre_go18.go b/vendor/github.com/go-openapi/runtime/middleware/pre_go18.go deleted file mode 100644 index 03385251e..000000000 --- a/vendor/github.com/go-openapi/runtime/middleware/pre_go18.go +++ /dev/null @@ -1,9 +0,0 @@ -// +build !go1.8 - -package middleware - -import "net/url" - -func pathUnescape(path string) (string, error) { - return url.QueryUnescape(path) -} diff --git a/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go b/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go index 4be330d6d..ef75e7441 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go +++ b/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go @@ -10,67 +10,57 @@ // RapiDocOpts configures the RapiDoc middlewares type RapiDocOpts struct { - // BasePath for the UI path, defaults to: / + // BasePath for the UI, defaults to: / BasePath string - // Path combines with BasePath for the full UI path, defaults to: docs + + // Path combines with BasePath to construct the path to the UI, defaults to: "docs". Path string - // SpecURL the url to find the spec for + + // SpecURL is the URL of the spec document. + // + // Defaults to: /swagger.json SpecURL string - // RapiDocURL for the js that generates the rapidoc site, defaults to: https://cdn.jsdelivr.net/npm/rapidoc/bundles/rapidoc.standalone.js - RapiDocURL string + // Title for the documentation site, default to: API documentation Title string + + // Template specifies a custom template to serve the UI + Template string + + // RapiDocURL points to the js asset that generates the rapidoc site. + // + // Defaults to https://unpkg.com/rapidoc/dist/rapidoc-min.js + RapiDocURL string } -// EnsureDefaults in case some options are missing func (r *RapiDocOpts) EnsureDefaults() { - if r.BasePath == "" { - r.BasePath = "/" - } - if r.Path == "" { - r.Path = "docs" - } - if r.SpecURL == "" { - r.SpecURL = "/swagger.json" - } + common := toCommonUIOptions(r) + common.EnsureDefaults() + fromCommonToAnyOptions(common, r) + + // rapidoc-specifics if r.RapiDocURL == "" { r.RapiDocURL = rapidocLatest } - if r.Title == "" { - r.Title = "API documentation" + if r.Template == "" { + r.Template = rapidocTemplate } } // RapiDoc creates a middleware to serve a documentation site for a swagger spec. -// This allows for altering the spec before starting the http listener. // +// This allows for altering the spec before starting the http listener. 
func RapiDoc(opts RapiDocOpts, next http.Handler) http.Handler { opts.EnsureDefaults() pth := path.Join(opts.BasePath, opts.Path) - tmpl := template.Must(template.New("rapidoc").Parse(rapidocTemplate)) + tmpl := template.Must(template.New("rapidoc").Parse(opts.Template)) + assets := bytes.NewBuffer(nil) + if err := tmpl.Execute(assets, opts); err != nil { + panic(fmt.Errorf("cannot execute template: %w", err)) + } - buf := bytes.NewBuffer(nil) - _ = tmpl.Execute(buf, opts) - b := buf.Bytes() - - return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - if r.URL.Path == pth { - rw.Header().Set("Content-Type", "text/html; charset=utf-8") - rw.WriteHeader(http.StatusOK) - - _, _ = rw.Write(b) - return - } - - if next == nil { - rw.Header().Set("Content-Type", "text/plain") - rw.WriteHeader(http.StatusNotFound) - _, _ = rw.Write([]byte(fmt.Sprintf("%q not found", pth))) - return - } - next.ServeHTTP(rw, r) - }) + return serveUI(pth, assets.Bytes(), next) } const ( @@ -79,7 +69,7 @@ func RapiDoc(opts RapiDocOpts, next http.Handler) http.Handler { {{ .Title }} - + diff --git a/vendor/github.com/go-openapi/runtime/middleware/redoc.go b/vendor/github.com/go-openapi/runtime/middleware/redoc.go index 019c85429..b96b01e7f 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/redoc.go +++ b/vendor/github.com/go-openapi/runtime/middleware/redoc.go @@ -10,67 +10,58 @@ // RedocOpts configures the Redoc middlewares type RedocOpts struct { - // BasePath for the UI path, defaults to: / + // BasePath for the UI, defaults to: / BasePath string - // Path combines with BasePath for the full UI path, defaults to: docs + + // Path combines with BasePath to construct the path to the UI, defaults to: "docs". Path string - // SpecURL the url to find the spec for + + // SpecURL is the URL of the spec document. + // + // Defaults to: /swagger.json SpecURL string - // RedocURL for the js that generates the redoc site, defaults to: https://cdn.jsdelivr.net/npm/redoc/bundles/redoc.standalone.js - RedocURL string + // Title for the documentation site, default to: API documentation Title string + + // Template specifies a custom template to serve the UI + Template string + + // RedocURL points to the js that generates the redoc site. + // + // Defaults to: https://cdn.jsdelivr.net/npm/redoc/bundles/redoc.standalone.js + RedocURL string } // EnsureDefaults in case some options are missing func (r *RedocOpts) EnsureDefaults() { - if r.BasePath == "" { - r.BasePath = "/" - } - if r.Path == "" { - r.Path = "docs" - } - if r.SpecURL == "" { - r.SpecURL = "/swagger.json" - } + common := toCommonUIOptions(r) + common.EnsureDefaults() + fromCommonToAnyOptions(common, r) + + // redoc-specifics if r.RedocURL == "" { r.RedocURL = redocLatest } - if r.Title == "" { - r.Title = "API documentation" + if r.Template == "" { + r.Template = redocTemplate } } // Redoc creates a middleware to serve a documentation site for a swagger spec. -// This allows for altering the spec before starting the http listener. // +// This allows for altering the spec before starting the http listener. 
func Redoc(opts RedocOpts, next http.Handler) http.Handler { opts.EnsureDefaults() pth := path.Join(opts.BasePath, opts.Path) - tmpl := template.Must(template.New("redoc").Parse(redocTemplate)) + tmpl := template.Must(template.New("redoc").Parse(opts.Template)) + assets := bytes.NewBuffer(nil) + if err := tmpl.Execute(assets, opts); err != nil { + panic(fmt.Errorf("cannot execute template: %w", err)) + } - buf := bytes.NewBuffer(nil) - _ = tmpl.Execute(buf, opts) - b := buf.Bytes() - - return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - if r.URL.Path == pth { - rw.Header().Set("Content-Type", "text/html; charset=utf-8") - rw.WriteHeader(http.StatusOK) - - _, _ = rw.Write(b) - return - } - - if next == nil { - rw.Header().Set("Content-Type", "text/plain") - rw.WriteHeader(http.StatusNotFound) - _, _ = rw.Write([]byte(fmt.Sprintf("%q not found", pth))) - return - } - next.ServeHTTP(rw, r) - }) + return serveUI(pth, assets.Bytes(), next) } const ( diff --git a/vendor/github.com/go-openapi/runtime/middleware/request.go b/vendor/github.com/go-openapi/runtime/middleware/request.go index 760c37861..82e143665 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/request.go +++ b/vendor/github.com/go-openapi/runtime/middleware/request.go @@ -19,10 +19,10 @@ "reflect" "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + "github.com/go-openapi/runtime/logger" "github.com/go-openapi/spec" "github.com/go-openapi/strfmt" - - "github.com/go-openapi/runtime" ) // UntypedRequestBinder binds and validates the data from a http request @@ -31,6 +31,7 @@ type UntypedRequestBinder struct { Parameters map[string]spec.Parameter Formats strfmt.Registry paramBinders map[string]*untypedParamBinder + debugLogf func(string, ...any) // a logging function to debug context and all components using it } // NewUntypedRequestBinder creates a new binder for reading a request. @@ -44,6 +45,7 @@ func NewUntypedRequestBinder(parameters map[string]spec.Parameter, spec *spec.Sw paramBinders: binders, Spec: spec, Formats: formats, + debugLogf: debugLogfFunc(nil), } } @@ -52,10 +54,10 @@ func (o *UntypedRequestBinder) Bind(request *http.Request, routeParams RoutePara val := reflect.Indirect(reflect.ValueOf(data)) isMap := val.Kind() == reflect.Map var result []error - debugLog("binding %d parameters for %s %s", len(o.Parameters), request.Method, request.URL.EscapedPath()) + o.debugLogf("binding %d parameters for %s %s", len(o.Parameters), request.Method, request.URL.EscapedPath()) for fieldName, param := range o.Parameters { binder := o.paramBinders[fieldName] - debugLog("binding parameter %s for %s %s", fieldName, request.Method, request.URL.EscapedPath()) + o.debugLogf("binding parameter %s for %s %s", fieldName, request.Method, request.URL.EscapedPath()) var target reflect.Value if !isMap { binder.Name = fieldName @@ -65,7 +67,7 @@ func (o *UntypedRequestBinder) Bind(request *http.Request, routeParams RoutePara if isMap { tpe := binder.Type() if tpe == nil { - if param.Schema.Type.Contains("array") { + if param.Schema.Type.Contains(typeArray) { tpe = reflect.TypeOf([]interface{}{}) } else { tpe = reflect.TypeOf(map[string]interface{}{}) @@ -102,3 +104,14 @@ func (o *UntypedRequestBinder) Bind(request *http.Request, routeParams RoutePara return nil } + +// SetLogger allows for injecting a logger to catch debug entries. +// +// The logger is enabled in DEBUG mode only. 
+func (o *UntypedRequestBinder) SetLogger(lg logger.Logger) { + o.debugLogf = debugLogfFunc(lg) +} + +func (o *UntypedRequestBinder) setDebugLogf(fn func(string, ...any)) { + o.debugLogf = fn +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/router.go b/vendor/github.com/go-openapi/runtime/middleware/router.go index 5052031c8..3a6aee90e 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/router.go +++ b/vendor/github.com/go-openapi/runtime/middleware/router.go @@ -17,10 +17,12 @@ import ( "fmt" "net/http" + "net/url" fpath "path" "regexp" "strings" + "github.com/go-openapi/runtime/logger" "github.com/go-openapi/runtime/security" "github.com/go-openapi/swag" @@ -67,10 +69,10 @@ func (r RouteParams) GetOK(name string) ([]string, bool, bool) { return nil, false, false } -// NewRouter creates a new context aware router middleware +// NewRouter creates a new context-aware router middleware func NewRouter(ctx *Context, next http.Handler) http.Handler { if ctx.router == nil { - ctx.router = DefaultRouter(ctx.spec, ctx.api) + ctx.router = DefaultRouter(ctx.spec, ctx.api, WithDefaultRouterLoggerFunc(ctx.debugLogf)) } return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { @@ -103,41 +105,75 @@ type RoutableAPI interface { DefaultConsumes() string } -// Router represents a swagger aware router +// Router represents a swagger-aware router type Router interface { Lookup(method, path string) (*MatchedRoute, bool) OtherMethods(method, path string) []string } type defaultRouteBuilder struct { - spec *loads.Document - analyzer *analysis.Spec - api RoutableAPI - records map[string][]denco.Record + spec *loads.Document + analyzer *analysis.Spec + api RoutableAPI + records map[string][]denco.Record + debugLogf func(string, ...any) // a logging function to debug context and all components using it } type defaultRouter struct { - spec *loads.Document - routers map[string]*denco.Router + spec *loads.Document + routers map[string]*denco.Router + debugLogf func(string, ...any) // a logging function to debug context and all components using it } -func newDefaultRouteBuilder(spec *loads.Document, api RoutableAPI) *defaultRouteBuilder { +func newDefaultRouteBuilder(spec *loads.Document, api RoutableAPI, opts ...DefaultRouterOpt) *defaultRouteBuilder { + var o defaultRouterOpts + for _, apply := range opts { + apply(&o) + } + if o.debugLogf == nil { + o.debugLogf = debugLogfFunc(nil) // defaults to standard logger + } + return &defaultRouteBuilder{ - spec: spec, - analyzer: analysis.New(spec.Spec()), - api: api, - records: make(map[string][]denco.Record), + spec: spec, + analyzer: analysis.New(spec.Spec()), + api: api, + records: make(map[string][]denco.Record), + debugLogf: o.debugLogf, } } -// DefaultRouter creates a default implemenation of the router -func DefaultRouter(spec *loads.Document, api RoutableAPI) Router { - builder := newDefaultRouteBuilder(spec, api) +// DefaultRouterOpt allows to inject optional behavior to the default router. +type DefaultRouterOpt func(*defaultRouterOpts) + +type defaultRouterOpts struct { + debugLogf func(string, ...any) +} + +// WithDefaultRouterLogger sets the debug logger for the default router. +// +// This is enabled only in DEBUG mode. +func WithDefaultRouterLogger(lg logger.Logger) DefaultRouterOpt { + return func(o *defaultRouterOpts) { + o.debugLogf = debugLogfFunc(lg) + } +} + +// WithDefaultRouterLoggerFunc sets a logging debug method for the default router. 
+func WithDefaultRouterLoggerFunc(fn func(string, ...any)) DefaultRouterOpt { + return func(o *defaultRouterOpts) { + o.debugLogf = fn + } +} + +// DefaultRouter creates a default implementation of the router +func DefaultRouter(spec *loads.Document, api RoutableAPI, opts ...DefaultRouterOpt) Router { + builder := newDefaultRouteBuilder(spec, api, opts...) if spec != nil { for method, paths := range builder.analyzer.Operations() { for path, operation := range paths { fp := fpath.Join(spec.BasePath(), path) - debugLog("adding route %s %s %q", method, fp, operation.ID) + builder.debugLogf("adding route %s %s %q", method, fp, operation.ID) builder.AddRoute(method, fp, operation) } } @@ -319,24 +355,24 @@ func (m *MatchedRoute) NeedsAuth() bool { func (d *defaultRouter) Lookup(method, path string) (*MatchedRoute, bool) { mth := strings.ToUpper(method) - debugLog("looking up route for %s %s", method, path) + d.debugLogf("looking up route for %s %s", method, path) if Debug { if len(d.routers) == 0 { - debugLog("there are no known routers") + d.debugLogf("there are no known routers") } for meth := range d.routers { - debugLog("got a router for %s", meth) + d.debugLogf("got a router for %s", meth) } } if router, ok := d.routers[mth]; ok { if m, rp, ok := router.Lookup(fpath.Clean(path)); ok && m != nil { if entry, ok := m.(*routeEntry); ok { - debugLog("found a route for %s %s with %d parameters", method, path, len(entry.Parameters)) + d.debugLogf("found a route for %s %s with %d parameters", method, path, len(entry.Parameters)) var params RouteParams for _, p := range rp { - v, err := pathUnescape(p.Value) + v, err := url.PathUnescape(p.Value) if err != nil { - debugLog("failed to escape %q: %v", p.Value, err) + d.debugLogf("failed to escape %q: %v", p.Value, err) v = p.Value } // a workaround to handle fragment/composing parameters until they are supported in denco router @@ -356,10 +392,10 @@ func (d *defaultRouter) Lookup(method, path string) (*MatchedRoute, bool) { return &MatchedRoute{routeEntry: *entry, Params: params}, true } } else { - debugLog("couldn't find a route by path for %s %s", method, path) + d.debugLogf("couldn't find a route by path for %s %s", method, path) } } else { - debugLog("couldn't find a route by method for %s %s", method, path) + d.debugLogf("couldn't find a route by method for %s %s", method, path) } return nil, false } @@ -378,6 +414,10 @@ func (d *defaultRouter) OtherMethods(method, path string) []string { return methods } +func (d *defaultRouter) SetLogger(lg logger.Logger) { + d.debugLogf = debugLogfFunc(lg) +} + // convert swagger parameters per path segment into a denco parameter as multiple parameters per segment are not supported in denco var pathConverter = regexp.MustCompile(`{(.+?)}([^/]*)`) @@ -413,7 +453,7 @@ func (d *defaultRouteBuilder) AddRoute(method, path string, operation *spec.Oper bp = bp[:len(bp)-1] } - debugLog("operation: %#v", *operation) + d.debugLogf("operation: %#v", *operation) if handler, ok := d.api.HandlerFor(method, strings.TrimPrefix(path, bp)); ok { consumes := d.analyzer.ConsumesFor(operation) produces := d.analyzer.ProducesFor(operation) @@ -428,6 +468,8 @@ func (d *defaultRouteBuilder) AddRoute(method, path string, operation *spec.Oper produces = append(produces, defProduces) } + requestBinder := NewUntypedRequestBinder(parameters, d.spec.Spec(), d.api.Formats()) + requestBinder.setDebugLogf(d.debugLogf) record := denco.NewRecord(pathConverter.ReplaceAllString(path, ":$1"), &routeEntry{ BasePath: bp, PathPattern: path, @@ -439,7 
+481,7 @@ func (d *defaultRouteBuilder) AddRoute(method, path string, operation *spec.Oper Producers: d.api.ProducersFor(normalizeOffers(produces)), Parameters: parameters, Formats: d.api.Formats(), - Binder: NewUntypedRequestBinder(parameters, d.spec.Spec(), d.api.Formats()), + Binder: requestBinder, Authenticators: d.buildAuthenticators(operation), Authorizer: d.api.Authorizer(), }) @@ -449,11 +491,11 @@ func (d *defaultRouteBuilder) AddRoute(method, path string, operation *spec.Oper func (d *defaultRouteBuilder) buildAuthenticators(operation *spec.Operation) RouteAuthenticators { requirements := d.analyzer.SecurityRequirementsFor(operation) - var auths []RouteAuthenticator + auths := make([]RouteAuthenticator, 0, len(requirements)) for _, reqs := range requirements { - var schemes []string + schemes := make([]string, 0, len(reqs)) scopes := make(map[string][]string, len(reqs)) - var scopeSlices [][]string + scopeSlices := make([][]string, 0, len(reqs)) for _, req := range reqs { schemes = append(schemes, req.Name) scopes[req.Name] = req.Scopes @@ -482,7 +524,8 @@ func (d *defaultRouteBuilder) Build() *defaultRouter { routers[method] = router } return &defaultRouter{ - spec: d.spec, - routers: routers, + spec: d.spec, + routers: routers, + debugLogf: d.debugLogf, } } diff --git a/vendor/github.com/go-openapi/runtime/middleware/spec.go b/vendor/github.com/go-openapi/runtime/middleware/spec.go index f02914298..87e17e342 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/spec.go +++ b/vendor/github.com/go-openapi/runtime/middleware/spec.go @@ -19,30 +19,84 @@ "path" ) -// Spec creates a middleware to serve a swagger spec. -// This allows for altering the spec before starting the http listener. -// This can be useful if you want to serve the swagger spec from another path than /swagger.json +const ( + contentTypeHeader = "Content-Type" + applicationJSON = "application/json" +) + +// SpecOption can be applied to the Spec serving middleware +type SpecOption func(*specOptions) + +var defaultSpecOptions = specOptions{ + Path: "", + Document: "swagger.json", +} + +type specOptions struct { + Path string + Document string +} + +func specOptionsWithDefaults(opts []SpecOption) specOptions { + o := defaultSpecOptions + for _, apply := range opts { + apply(&o) + } + + return o +} + +// Spec creates a middleware to serve a swagger spec as a JSON document. // -func Spec(basePath string, b []byte, next http.Handler) http.Handler { +// This allows for altering the spec before starting the http listener. +// +// The basePath argument indicates the path of the spec document (defaults to "/"). +// Additional SpecOption can be used to change the name of the document (defaults to "swagger.json"). 
+func Spec(basePath string, b []byte, next http.Handler, opts ...SpecOption) http.Handler { if basePath == "" { basePath = "/" } - pth := path.Join(basePath, "swagger.json") + o := specOptionsWithDefaults(opts) + pth := path.Join(basePath, o.Path, o.Document) return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - if r.URL.Path == pth { - rw.Header().Set("Content-Type", "application/json") + if path.Clean(r.URL.Path) == pth { + rw.Header().Set(contentTypeHeader, applicationJSON) rw.WriteHeader(http.StatusOK) - //#nosec _, _ = rw.Write(b) + return } - if next == nil { - rw.Header().Set("Content-Type", "application/json") - rw.WriteHeader(http.StatusNotFound) + if next != nil { + next.ServeHTTP(rw, r) + return } - next.ServeHTTP(rw, r) + + rw.Header().Set(contentTypeHeader, applicationJSON) + rw.WriteHeader(http.StatusNotFound) }) } + +// WithSpecPath sets the path to be joined to the base path of the Spec middleware. +// +// This is empty by default. +func WithSpecPath(pth string) SpecOption { + return func(o *specOptions) { + o.Path = pth + } +} + +// WithSpecDocument sets the name of the JSON document served as a spec. +// +// By default, this is "swagger.json" +func WithSpecDocument(doc string) SpecOption { + return func(o *specOptions) { + if doc == "" { + return + } + + o.Document = doc + } +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go b/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go index b4dea29e4..ec3c10cba 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go +++ b/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go @@ -8,40 +8,65 @@ "path" ) -// SwaggerUIOpts configures the Swaggerui middlewares +// SwaggerUIOpts configures the SwaggerUI middleware type SwaggerUIOpts struct { - // BasePath for the UI path, defaults to: / + // BasePath for the API, defaults to: / BasePath string - // Path combines with BasePath for the full UI path, defaults to: docs + + // Path combines with BasePath to construct the path to the UI, defaults to: "docs". Path string - // SpecURL the url to find the spec for + + // SpecURL is the URL of the spec document. + // + // Defaults to: /swagger.json SpecURL string + + // Title for the documentation site, default to: API documentation + Title string + + // Template specifies a custom template to serve the UI + Template string + // OAuthCallbackURL the url called after OAuth2 login OAuthCallbackURL string // The three components needed to embed swagger-ui - SwaggerURL string + + // SwaggerURL points to the js that generates the SwaggerUI site. 
+ // + // Defaults to: https://unpkg.com/swagger-ui-dist/swagger-ui-bundle.js + SwaggerURL string + SwaggerPresetURL string SwaggerStylesURL string Favicon32 string Favicon16 string - - // Title for the documentation site, default to: API documentation - Title string } // EnsureDefaults in case some options are missing func (r *SwaggerUIOpts) EnsureDefaults() { - if r.BasePath == "" { - r.BasePath = "/" + r.ensureDefaults() + + if r.Template == "" { + r.Template = swaggeruiTemplate } - if r.Path == "" { - r.Path = "docs" - } - if r.SpecURL == "" { - r.SpecURL = "/swagger.json" +} + +func (r *SwaggerUIOpts) EnsureDefaultsOauth2() { + r.ensureDefaults() + + if r.Template == "" { + r.Template = swaggerOAuthTemplate } +} + +func (r *SwaggerUIOpts) ensureDefaults() { + common := toCommonUIOptions(r) + common.EnsureDefaults() + fromCommonToAnyOptions(common, r) + + // swaggerui-specifics if r.OAuthCallbackURL == "" { r.OAuthCallbackURL = path.Join(r.BasePath, r.Path, "oauth2-callback") } @@ -60,40 +85,22 @@ func (r *SwaggerUIOpts) EnsureDefaults() { if r.Favicon32 == "" { r.Favicon32 = swaggerFavicon32Latest } - if r.Title == "" { - r.Title = "API documentation" - } } // SwaggerUI creates a middleware to serve a documentation site for a swagger spec. +// // This allows for altering the spec before starting the http listener. func SwaggerUI(opts SwaggerUIOpts, next http.Handler) http.Handler { opts.EnsureDefaults() pth := path.Join(opts.BasePath, opts.Path) - tmpl := template.Must(template.New("swaggerui").Parse(swaggeruiTemplate)) + tmpl := template.Must(template.New("swaggerui").Parse(opts.Template)) + assets := bytes.NewBuffer(nil) + if err := tmpl.Execute(assets, opts); err != nil { + panic(fmt.Errorf("cannot execute template: %w", err)) + } - buf := bytes.NewBuffer(nil) - _ = tmpl.Execute(buf, &opts) - b := buf.Bytes() - - return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - if path.Join(r.URL.Path) == pth { - rw.Header().Set("Content-Type", "text/html; charset=utf-8") - rw.WriteHeader(http.StatusOK) - - _, _ = rw.Write(b) - return - } - - if next == nil { - rw.Header().Set("Content-Type", "text/plain") - rw.WriteHeader(http.StatusNotFound) - _, _ = rw.Write([]byte(fmt.Sprintf("%q not found", pth))) - return - } - next.ServeHTTP(rw, r) - }) + return serveUI(pth, assets.Bytes(), next) } const ( diff --git a/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go b/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go index 576f6003f..e81212f71 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go +++ b/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go @@ -4,37 +4,20 @@ "bytes" "fmt" "net/http" - "path" "text/template" ) func SwaggerUIOAuth2Callback(opts SwaggerUIOpts, next http.Handler) http.Handler { - opts.EnsureDefaults() + opts.EnsureDefaultsOauth2() pth := opts.OAuthCallbackURL - tmpl := template.Must(template.New("swaggeroauth").Parse(swaggerOAuthTemplate)) + tmpl := template.Must(template.New("swaggeroauth").Parse(opts.Template)) + assets := bytes.NewBuffer(nil) + if err := tmpl.Execute(assets, opts); err != nil { + panic(fmt.Errorf("cannot execute template: %w", err)) + } - buf := bytes.NewBuffer(nil) - _ = tmpl.Execute(buf, &opts) - b := buf.Bytes() - - return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - if path.Join(r.URL.Path) == pth { - rw.Header().Set("Content-Type", "text/html; charset=utf-8") - rw.WriteHeader(http.StatusOK) - - _, _ = rw.Write(b) - return - } - - if 
next == nil {
- rw.Header().Set("Content-Type", "text/plain")
- rw.WriteHeader(http.StatusNotFound)
- _, _ = rw.Write([]byte(fmt.Sprintf("%q not found", pth)))
- return
- }
- next.ServeHTTP(rw, r)
- })
+ return serveUI(pth, assets.Bytes(), next)
 }
 const (
diff --git a/vendor/github.com/go-openapi/runtime/middleware/ui_options.go b/vendor/github.com/go-openapi/runtime/middleware/ui_options.go
new file mode 100644
index 000000000..b86efa008
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/ui_options.go
@@ -0,0 +1,173 @@
+package middleware
+
+import (
+ "bytes"
+ "encoding/gob"
+ "fmt"
+ "net/http"
+ "path"
+ "strings"
+)
+
+const (
+ // constants that are common to all UI-serving middlewares
+ defaultDocsPath = "docs"
+ defaultDocsURL = "/swagger.json"
+ defaultDocsTitle = "API Documentation"
+)
+
+// uiOptions defines common options for UI serving middlewares.
+type uiOptions struct {
+ // BasePath for the UI, defaults to: /
+ BasePath string
+
+ // Path combines with BasePath to construct the path to the UI, defaults to: "docs".
+ Path string
+
+ // SpecURL is the URL of the spec document.
+ //
+ // Defaults to: /swagger.json
+ SpecURL string
+
+ // Title for the documentation site, default to: API documentation
+ Title string
+
+ // Template specifies a custom template to serve the UI
+ Template string
+}
+
+// toCommonUIOptions converts any UI option type to retain the common options.
+//
+// This uses gob encoding/decoding to convert common fields from one struct to another.
+func toCommonUIOptions(opts interface{}) uiOptions {
+ var buf bytes.Buffer
+ enc := gob.NewEncoder(&buf)
+ dec := gob.NewDecoder(&buf)
+ var o uiOptions
+ err := enc.Encode(opts)
+ if err != nil {
+ panic(err)
+ }
+
+ err = dec.Decode(&o)
+ if err != nil {
+ panic(err)
+ }
+
+ return o
+}
+
+func fromCommonToAnyOptions[T any](source uiOptions, target *T) {
+ var buf bytes.Buffer
+ enc := gob.NewEncoder(&buf)
+ dec := gob.NewDecoder(&buf)
+ err := enc.Encode(source)
+ if err != nil {
+ panic(err)
+ }
+
+ err = dec.Decode(target)
+ if err != nil {
+ panic(err)
+ }
+}
+
+// UIOption can be applied to UI serving middleware, such as Context.APIHandler or
+// Context.APIHandlerSwaggerUI to alter the default behavior.
+type UIOption func(*uiOptions)
+
+func uiOptionsWithDefaults(opts []UIOption) uiOptions {
+ var o uiOptions
+ for _, apply := range opts {
+ apply(&o)
+ }
+
+ return o
+}
+
+// WithUIBasePath sets the base path from where to serve the UI assets.
+//
+// By default, Context middleware sets this value to the API base path.
+func WithUIBasePath(base string) UIOption {
+ return func(o *uiOptions) {
+ if !strings.HasPrefix(base, "/") {
+ base = "/" + base
+ }
+ o.BasePath = base
+ }
+}
+
+// WithUIPath sets the path from where to serve the UI assets (i.e. /{basepath}/{path}).
+func WithUIPath(pth string) UIOption {
+ return func(o *uiOptions) {
+ o.Path = pth
+ }
+}
+
+// WithUISpecURL sets the path from where to serve swagger spec document.
+//
+// This may be specified as a full URL or a path.
+//
+// By default, this is "/swagger.json"
+func WithUISpecURL(specURL string) UIOption {
+ return func(o *uiOptions) {
+ o.SpecURL = specURL
+ }
+}
+
+// WithUITitle sets the title of the UI.
+//
+// By default, Context middleware sets this value to the title found in the API spec.
+func WithUITitle(title string) UIOption {
+ return func(o *uiOptions) {
+ o.Title = title
+ }
+}
+
+// WithTemplate allows to set a custom template for the UI.
+// +// UI middleware will panic if the template does not parse or execute properly. +func WithTemplate(tpl string) UIOption { + return func(o *uiOptions) { + o.Template = tpl + } +} + +// EnsureDefaults in case some options are missing +func (r *uiOptions) EnsureDefaults() { + if r.BasePath == "" { + r.BasePath = "/" + } + if r.Path == "" { + r.Path = defaultDocsPath + } + if r.SpecURL == "" { + r.SpecURL = defaultDocsURL + } + if r.Title == "" { + r.Title = defaultDocsTitle + } +} + +// serveUI creates a middleware that serves a templated asset as text/html. +func serveUI(pth string, assets []byte, next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + if path.Clean(r.URL.Path) == pth { + rw.Header().Set(contentTypeHeader, "text/html; charset=utf-8") + rw.WriteHeader(http.StatusOK) + _, _ = rw.Write(assets) + + return + } + + if next != nil { + next.ServeHTTP(rw, r) + + return + } + + rw.Header().Set(contentTypeHeader, "text/plain") + rw.WriteHeader(http.StatusNotFound) + _, _ = rw.Write([]byte(fmt.Sprintf("%q not found", pth))) + }) +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go b/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go index 39a85f7d9..7b7269bd1 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go +++ b/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go @@ -197,30 +197,31 @@ func (d *API) Validate() error { // validateWith validates the registrations in this API against the provided spec analyzer func (d *API) validate() error { - var consumes []string + consumes := make([]string, 0, len(d.consumers)) for k := range d.consumers { consumes = append(consumes, k) } - var produces []string + produces := make([]string, 0, len(d.producers)) for k := range d.producers { produces = append(produces, k) } - var authenticators []string + authenticators := make([]string, 0, len(d.authenticators)) for k := range d.authenticators { authenticators = append(authenticators, k) } - var operations []string + operations := make([]string, 0, len(d.operations)) for m, v := range d.operations { for p := range v { operations = append(operations, fmt.Sprintf("%s %s", strings.ToUpper(m), p)) } } - var definedAuths []string - for k := range d.spec.Spec().SecurityDefinitions { + secDefinitions := d.spec.Spec().SecurityDefinitions + definedAuths := make([]string, 0, len(secDefinitions)) + for k := range secDefinitions { definedAuths = append(definedAuths, k) } @@ -267,7 +268,7 @@ func (d *API) verify(name string, registrations []string, expectations []string) delete(expected, k) } - var unregistered []string + unregistered := make([]string, 0, len(expected)) for k := range expected { unregistered = append(unregistered, k) } diff --git a/vendor/github.com/go-openapi/runtime/middleware/validation.go b/vendor/github.com/go-openapi/runtime/middleware/validation.go index 1f0135b57..0a5356c60 100644 --- a/vendor/github.com/go-openapi/runtime/middleware/validation.go +++ b/vendor/github.com/go-openapi/runtime/middleware/validation.go @@ -35,7 +35,6 @@ type validation struct { // ContentType validates the content type of a request func validateContentType(allowed []string, actual string) error { - debugLog("validating content type for %q against [%s]", actual, strings.Join(allowed, ", ")) if len(allowed) == 0 { return nil } @@ -57,13 +56,13 @@ func validateContentType(allowed []string, actual string) error { } func validateRequest(ctx *Context, request *http.Request, route 
*MatchedRoute) *validation { - debugLog("validating request %s %s", request.Method, request.URL.EscapedPath()) validate := &validation{ context: ctx, request: request, route: route, bound: make(map[string]interface{}), } + validate.debugLogf("validating request %s %s", request.Method, request.URL.EscapedPath()) validate.contentType() if len(validate.result) == 0 { @@ -76,8 +75,12 @@ func validateRequest(ctx *Context, request *http.Request, route *MatchedRoute) * return validate } +func (v *validation) debugLogf(format string, args ...any) { + v.context.debugLogf(format, args...) +} + func (v *validation) parameters() { - debugLog("validating request parameters for %s %s", v.request.Method, v.request.URL.EscapedPath()) + v.debugLogf("validating request parameters for %s %s", v.request.Method, v.request.URL.EscapedPath()) if result := v.route.Binder.Bind(v.request, v.route.Params, v.route.Consumer, v.bound); result != nil { if result.Error() == "validation failure list" { for _, e := range result.(*errors.Validation).Value.([]interface{}) { @@ -91,7 +94,7 @@ func (v *validation) parameters() { func (v *validation) contentType() { if len(v.result) == 0 && runtime.HasBody(v.request) { - debugLog("validating body content type for %s %s", v.request.Method, v.request.URL.EscapedPath()) + v.debugLogf("validating body content type for %s %s", v.request.Method, v.request.URL.EscapedPath()) ct, _, req, err := v.context.ContentType(v.request) if err != nil { v.result = append(v.result, err) @@ -100,6 +103,7 @@ func (v *validation) contentType() { } if len(v.result) == 0 { + v.debugLogf("validating content type for %q against [%s]", ct, strings.Join(v.route.Consumes, ", ")) if err := validateContentType(v.route.Consumes, ct); err != nil { v.result = append(v.result, err) } diff --git a/vendor/github.com/go-openapi/runtime/request.go b/vendor/github.com/go-openapi/runtime/request.go index 078fda173..9e3e1ecb1 100644 --- a/vendor/github.com/go-openapi/runtime/request.go +++ b/vendor/github.com/go-openapi/runtime/request.go @@ -16,6 +16,8 @@ import ( "bufio" + "context" + "errors" "io" "net/http" "strings" @@ -96,10 +98,16 @@ func (p *peekingReader) Read(d []byte) (int, error) { if p == nil { return 0, io.EOF } + if p.underlying == nil { + return 0, io.ErrUnexpectedEOF + } return p.underlying.Read(d) } func (p *peekingReader) Close() error { + if p.underlying == nil { + return errors.New("reader already closed") + } p.underlying = nil if p.orig != nil { return p.orig.Close() @@ -107,9 +115,11 @@ func (p *peekingReader) Close() error { return nil } -// JSONRequest creates a new http request with json headers set +// JSONRequest creates a new http request with json headers set. +// +// It uses context.Background. 
func JSONRequest(method, urlStr string, body io.Reader) (*http.Request, error) { - req, err := http.NewRequest(method, urlStr, body) + req, err := http.NewRequestWithContext(context.Background(), method, urlStr, body) if err != nil { return nil, err } diff --git a/vendor/github.com/go-openapi/runtime/security/authenticator.go b/vendor/github.com/go-openapi/runtime/security/authenticator.go index c3ffdac7e..bb30472bb 100644 --- a/vendor/github.com/go-openapi/runtime/security/authenticator.go +++ b/vendor/github.com/go-openapi/runtime/security/authenticator.go @@ -25,12 +25,13 @@ ) const ( - query = "query" - header = "header" + query = "query" + header = "header" + accessTokenParam = "access_token" ) // HttpAuthenticator is a function that authenticates a HTTP request -func HttpAuthenticator(handler func(*http.Request) (bool, interface{}, error)) runtime.Authenticator { +func HttpAuthenticator(handler func(*http.Request) (bool, interface{}, error)) runtime.Authenticator { //nolint:revive,stylecheck return runtime.AuthenticatorFunc(func(params interface{}) (bool, interface{}, error) { if request, ok := params.(*http.Request); ok { return handler(request) @@ -158,7 +159,7 @@ func APIKeyAuth(name, in string, authenticate TokenAuthentication) runtime.Authe inl := strings.ToLower(in) if inl != query && inl != header { // panic because this is most likely a typo - panic(errors.New(500, "api key auth: in value needs to be either \"query\" or \"header\".")) + panic(errors.New(500, "api key auth: in value needs to be either \"query\" or \"header\"")) } var getToken func(*http.Request) string @@ -186,7 +187,7 @@ func APIKeyAuthCtx(name, in string, authenticate TokenAuthenticationCtx) runtime inl := strings.ToLower(in) if inl != query && inl != header { // panic because this is most likely a typo - panic(errors.New(500, "api key auth: in value needs to be either \"query\" or \"header\".")) + panic(errors.New(500, "api key auth: in value needs to be either \"query\" or \"header\"")) } var getToken func(*http.Request) string @@ -226,12 +227,12 @@ func BearerAuth(name string, authenticate ScopedTokenAuthentication) runtime.Aut } if token == "" { qs := r.Request.URL.Query() - token = qs.Get("access_token") + token = qs.Get(accessTokenParam) } //#nosec ct, _, _ := runtime.ContentType(r.Request.Header) if token == "" && (ct == "application/x-www-form-urlencoded" || ct == "multipart/form-data") { - token = r.Request.FormValue("access_token") + token = r.Request.FormValue(accessTokenParam) } if token == "" { @@ -256,12 +257,12 @@ func BearerAuthCtx(name string, authenticate ScopedTokenAuthenticationCtx) runti } if token == "" { qs := r.Request.URL.Query() - token = qs.Get("access_token") + token = qs.Get(accessTokenParam) } //#nosec ct, _, _ := runtime.ContentType(r.Request.Header) if token == "" && (ct == "application/x-www-form-urlencoded" || ct == "multipart/form-data") { - token = r.Request.FormValue("access_token") + token = r.Request.FormValue(accessTokenParam) } if token == "" { diff --git a/vendor/github.com/go-openapi/spec/.gitignore b/vendor/github.com/go-openapi/spec/.gitignore index dd91ed6a0..f47cb2045 100644 --- a/vendor/github.com/go-openapi/spec/.gitignore +++ b/vendor/github.com/go-openapi/spec/.gitignore @@ -1,2 +1 @@ -secrets.yml -coverage.out +*.out diff --git a/vendor/github.com/go-openapi/spec/.golangci.yml b/vendor/github.com/go-openapi/spec/.golangci.yml index 835d55e74..22f8d21cc 100644 --- a/vendor/github.com/go-openapi/spec/.golangci.yml +++ 
b/vendor/github.com/go-openapi/spec/.golangci.yml @@ -11,7 +11,7 @@ linters-settings: threshold: 200 goconst: min-len: 2 - min-occurrences: 2 + min-occurrences: 3 linters: enable-all: true @@ -40,3 +40,22 @@ linters: - tparallel - thelper - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/spec/README.md b/vendor/github.com/go-openapi/spec/README.md index 18782c6da..7fd2810c6 100644 --- a/vendor/github.com/go-openapi/spec/README.md +++ b/vendor/github.com/go-openapi/spec/README.md @@ -1,8 +1,5 @@ -# OAI object model +# OpenAPI v2 object model [![Build Status](https://github.com/go-openapi/spec/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/spec/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/spec/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/spec) -[![Build Status](https://travis-ci.org/go-openapi/spec.svg?branch=master)](https://travis-ci.org/go-openapi/spec) - -[![codecov](https://codecov.io/gh/go-openapi/spec/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/spec) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) [![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/spec/master/LICENSE) [![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/spec.svg)](https://pkg.go.dev/github.com/go-openapi/spec) @@ -32,3 +29,26 @@ The object model for OpenAPI specification documents. > This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story. > > An early attempt to support Swagger 3 may be found at: https://github.com/go-openapi/spec3 + +* Does the unmarshaling support YAML? + +> Not directly. The exposed types know only how to unmarshal from JSON. +> +> In order to load a YAML document as a Swagger spec, you need to use the loaders provided by +> github.com/go-openapi/loads +> +> Take a look at the example there: https://pkg.go.dev/github.com/go-openapi/loads#example-Spec +> +> See also https://github.com/go-openapi/spec/issues/164 + +* How can I validate a spec? + +> Validation is provided by [the validate package](http://github.com/go-openapi/validate) + +* Why do we have an `ID` field for `Schema` which is not part of the swagger spec? + +> We found jsonschema compatibility more important: since `id` in jsonschema influences +> how `$ref` are resolved. +> This `id` does not conflict with any property named `id`. +> +> See also https://github.com/go-openapi/spec/issues/23 diff --git a/vendor/github.com/go-openapi/spec/appveyor.yml b/vendor/github.com/go-openapi/spec/appveyor.yml deleted file mode 100644 index 090359391..000000000 --- a/vendor/github.com/go-openapi/spec/appveyor.yml +++ /dev/null @@ -1,32 +0,0 @@ -version: "0.1.{build}" - -clone_folder: C:\go-openapi\spec -shallow_clone: true # for startup speed -pull_requests: - do_not_increment_build_number: true - -#skip_tags: true -#skip_branch_with_pr: true - -# appveyor.yml -build: off - -environment: - GOPATH: c:\gopath - -stack: go 1.15 - -test_script: - - go test -v -timeout 20m ./... 
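A hedged sketch (assumption, not shown in the patch) of the YAML loading path mentioned in the README FAQ above, using the loaders from github.com/go-openapi/loads; the file path is illustrative:

package main

import (
	"log"

	"github.com/go-openapi/loads"
)

func main() {
	// loads.Spec accepts JSON or YAML and exposes the JSON-only object model from go-openapi/spec.
	doc, err := loads.Spec("./swagger.yml")
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("loaded spec, swagger version %s", doc.Spec().Swagger)
}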
- -deploy: off - -notifications: - - provider: Slack - incoming_webhook: https://hooks.slack.com/services/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 - auth_token: - secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4= - channel: bots - on_build_success: false - on_build_failure: true - on_build_status_changed: true diff --git a/vendor/github.com/go-openapi/spec/bindata.go b/vendor/github.com/go-openapi/spec/bindata.go deleted file mode 100644 index afc83850c..000000000 --- a/vendor/github.com/go-openapi/spec/bindata.go +++ /dev/null @@ -1,297 +0,0 @@ -// Code generated by go-bindata. DO NOT EDIT. -// sources: -// schemas/jsonschema-draft-04.json (4.357kB) -// schemas/v2/schema.json (40.248kB) - -package spec - -import ( - "bytes" - "compress/gzip" - "crypto/sha256" - "fmt" - "io" - "io/ioutil" - "os" - "path/filepath" - "strings" - "time" -) - -func bindataRead(data []byte, name string) ([]byte, error) { - gz, err := gzip.NewReader(bytes.NewBuffer(data)) - if err != nil { - return nil, fmt.Errorf("read %q: %v", name, err) - } - - var buf bytes.Buffer - _, err = io.Copy(&buf, gz) - clErr := gz.Close() - - if err != nil { - return nil, fmt.Errorf("read %q: %v", name, err) - } - if clErr != nil { - return nil, err - } - - return buf.Bytes(), nil -} - -type asset struct { - bytes []byte - info os.FileInfo - digest [sha256.Size]byte -} - -type bindataFileInfo struct { - name string - size int64 - mode os.FileMode - modTime time.Time -} - -func (fi bindataFileInfo) Name() string { - return fi.name -} -func (fi bindataFileInfo) Size() int64 { - return fi.size -} -func (fi bindataFileInfo) Mode() os.FileMode { - return fi.mode -} -func (fi bindataFileInfo) ModTime() time.Time { - return fi.modTime -} -func (fi bindataFileInfo) IsDir() bool { - return false -} -func (fi bindataFileInfo) Sys() interface{} { - return nil -} - -var _jsonschemaDraft04Json = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xc4\x57\x3d\x6f\xdb\x3c\x10\xde\xf3\x2b\x08\x26\x63\xf2\x2a\x2f\xd0\xc9\x5b\xd1\x2e\x01\x5a\x34\x43\x37\x23\x03\x6d\x9d\x6c\x06\x14\xa9\x50\x54\x60\xc3\xd0\x7f\x2f\x28\x4a\x14\x29\x91\x92\x2d\xa7\x8d\x97\x28\xbc\xaf\xe7\x8e\xf7\xc5\xd3\x0d\x42\x08\x61\x9a\xe2\x15\xc2\x7b\xa5\x8a\x55\x92\xbc\x96\x82\x3f\x94\xdb\x3d\xe4\xe4\x3f\x21\x77\x49\x2a\x49\xa6\x1e\x1e\xbf\x24\xe6\xec\x16\xdf\x1b\xa1\x3b\xf3\xff\x02\xc9\x14\xca\xad\xa4\x85\xa2\x82\x6b\xe9\x6f\x42\x02\x32\x2c\x28\x07\x45\x5a\x15\x3d\x77\x46\x39\xd5\xcc\x25\x5e\x21\x83\xb8\x21\x18\xb6\xaf\x52\x92\xa3\x47\x68\x88\xea\x58\x80\x56\x4e\x1a\xf2\xbd\x4f\xcc\x29\x7f\x52\x90\x6b\x7d\xff\x0f\x48\xb4\x3d\x3f\x21\x7c\x27\x21\xd3\x2a\x6e\x31\xaa\x2d\x53\xdd\xf3\xe3\x42\x94\x54\xd1\x77\x78\xe2\x0a\x76\x20\xe3\x20\x68\xcb\x30\x86\x41\xf3\x2a\xc7\x2b\xf4\x78\x8e\xfe\xef\x90\x91\x8a\xa9\xc7\xb1\x1d\xc2\xd8\x2f\x0d\x75\xed\xc1\x4e\x9c\xc8\x25\x43\xac\xa8\xbe\xd7\xcc\xa9\xd1\xa9\x21\xa0\x1a\xbd\x04\x61\x94\x34\x2f\x18\xfc\x3e\x16\x50\x8e\x4d\x03\x6f\x1c\x58\xdb\x48\x23\xbc\x11\x82\x01\xe1\xfa\xd3\x3a\x8e\x30\xaf\x18\x33\x7f\xf3\x8d\x39\x11\x9b\x57\xd8\x2a\xfd\x55\x2a\x49\xf9\x0e\xc7\xec\x37\xd4\x25\xf7\xec\x5c\x66\xc7\xd7\x99\xaa\xcf\x4f\x89\x8a\xd3\xb7\x0a\x3a\xaa\x92\x15\xf4\x30\x6f\x1c\xb0\xd6\x46\xe7\x98\x39\x2d\xa4\x28\x40\x2a\x3a\x88\x9e\x29\xba\x88\x37\x2d\xca\x60\x38\xfa\xba\x5b\x20\xac\xa8\x62\xb0\x4c\xd4\xaf\xda\x45\x0a\xba\x5c\x3b\xb9\xc7\x79\xc5\x14\x2d\x18\x34\x19\x1c\x51\xdb\x25\x4d\xb4\x7e\x06\x14\x38\x6c\x59\x55\xd2\x77\xf8\x69\x59\xfc\x7b\x73\xed\x93\x43\xcb\x32\x6d\x3c\x28\xdc\x1b\x9a\xd3\x62\xab\xc2\x27\xf7\x41\xc9\x08\x2b\x23\x08\xad\x13\x57\x21\x9c\xd3\x72\x0d\x42\x72\xf8\x01\x7c\xa7\xf6\x83\xce\x39\xd7\x82\x3c\x1f\x2f\xd6\x60\x1b\xa2\xdf\x35\x89\x52\x20\xe7\x73\x74\xe0\x66\x26\x64\x4e\xb4\x97\x58\xc2\x0e\x0e\xe1\x60\x92\x34\x6d\xa0\x10\xd6\xb5\x83\x61\x27\xe6\x47\xd3\x89\xbd\x63\xfd\x3b\x8d\x03\x3d\x6c\x42\x2d\x5b\x70\xee\xe8\xdf\x4b\xf4\x66\x4e\xe1\x01\x45\x17\x80\x74\xad\x4f\xc3\xf3\xae\xc6\x1d\xc6\xd7\xc2\xce\xc9\xe1\x29\x30\x86\x2f\x4a\xa6\x4b\x15\x84\x73\xc9\x6f\xfd\x7f\xa5\x6e\x9e\xbd\xf1\xb0\xd4\xdd\x45\x5a\xc2\x3e\x4b\x78\xab\xa8\x84\x74\x4a\x91\x3b\x92\x23\x05\xf2\x1c\x1e\x7b\xf3\x09\xf8\xcf\xab\x24\xb6\x60\xa2\xe8\x4c\x9f\x75\x77\xaa\x8c\xe6\x01\x45\x36\x86\xcf\xc3\x63\x3a\xea\xd4\x8d\x7e\x06\xac\x14\x0a\xe0\x29\xf0\xed\x07\x22\x1a\x65\xda\x44\xae\xa2\x73\x1a\xe6\x90\x69\xa2\x8c\x46\xb2\x2f\xde\x49\x38\x08\xed\xfe\xfd\x41\xaf\x9f\xa9\x55\xd7\xdd\x22\x8d\xfa\x45\x63\xc5\x0f\x80\xf3\xb4\x08\xd6\x79\x30\x9e\x93\xee\x59\xa6\xd0\x4b\xee\x22\xe3\x33\xc1\x3a\x27\x68\x36\x78\x7e\x87\x0a\x06\xd5\x2e\x20\xd3\xaf\x15\xfb\xd8\x3b\x73\x14\xbb\x92\xed\x05\x5d\x2e\x29\x38\x2c\x94\xe4\x42\x45\x5e\xd3\xb5\x7d\xdf\x47\xca\x38\xb4\x5c\xaf\xfb\x7d\xdd\x6d\xf4\xa1\x2d\x77\xdd\x2f\xce\x6d\xc4\x7b\x8b\x4e\x67\xa9\x6f\xfe\x04\x00\x00\xff\xff\xb1\xd1\x27\x78\x05\x11\x00\x00") - -func jsonschemaDraft04JsonBytes() ([]byte, error) { - return bindataRead( - _jsonschemaDraft04Json, - "jsonschema-draft-04.json", - ) -} - -func jsonschemaDraft04Json() (*asset, error) { - bytes, err := jsonschemaDraft04JsonBytes() - if err != nil { - return nil, err - } - - info := bindataFileInfo{name: "jsonschema-draft-04.json", size: 4357, mode: os.FileMode(0640), modTime: time.Unix(1568963823, 0)} - a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xe1, 0x48, 0x9d, 0xb, 0x47, 0x55, 0xf0, 0x27, 0x93, 0x30, 0x25, 0x91, 0xd3, 0xfc, 0xb8, 0xf0, 0x7b, 0x68, 0x93, 0xa8, 0x2a, 0x94, 0xf2, 0x48, 0x95, 0xf8, 0xe4, 0xed, 
0xf1, 0x1b, 0x82, 0xe2}} - return a, nil -} - -var _v2SchemaJson = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xec\x5d\x4f\x93\xdb\x36\xb2\xbf\xfb\x53\xa0\x14\x57\xd9\xae\xd8\x92\xe3\xf7\x2e\xcf\x97\xd4\xbc\xd8\x49\x66\x37\x5e\x4f\x79\x26\xbb\x87\x78\x5c\x05\x91\x2d\x09\x09\x09\x30\x00\x38\x33\x5a\xef\x7c\xf7\x2d\xf0\x9f\x08\x02\x20\x41\x8a\xd2\xc8\x0e\x0f\xa9\x78\x28\xa0\xd1\xdd\x68\x34\x7e\xdd\xf8\xf7\xf9\x11\x42\x33\x49\x64\x04\xb3\xd7\x68\x76\x86\xfe\x76\xf9\xfe\x1f\xe8\x32\xd8\x40\x8c\xd1\x8a\x71\x74\x79\x8b\xd7\x6b\xe0\xe8\xd5\xfc\x25\x3a\xbb\x38\x9f\xcf\x9e\xab\x0a\x24\x54\xa5\x37\x52\x26\xaf\x17\x0b\x91\x17\x99\x13\xb6\xb8\x79\xb5\x10\x59\xdd\xf9\xef\x82\xd1\x6f\xf2\xc2\x8f\xf3\x4f\xb5\x1a\xea\xc7\x17\x45\x41\xc6\xd7\x8b\x90\xe3\x95\x7c\xf1\xf2\x7f\x8b\xca\x45\x3d\xb9\x4d\x32\xa6\xd8\xf2\x77\x08\x64\xfe\x8d\xc3\x9f\x29\xe1\xa0\x9a\xff\xed\x11\x42\x08\xcd\x8a\xd6\xb3\x9f\x15\x67\x74\xc5\xca\x7f\x27\x58\x6e\xc4\xec\x11\x42\xd7\x59\x5d\x1c\x86\x44\x12\x46\x71\x74\xc1\x59\x02\x5c\x12\x10\xb3\xd7\x68\x85\x23\x01\x59\x81\x04\x4b\x09\x9c\x6a\xbf\x7e\xce\x49\x7d\xba\x7b\x51\xfd\xa1\x44\xe2\xb0\x52\xac\x7d\xb3\x08\x61\x45\x68\x46\x56\x2c\x6e\x80\x86\x8c\xbf\xbd\x93\x40\x05\x61\x74\x96\x95\xbe\x7f\x84\xd0\x7d\x4e\xde\x42\xb7\xe4\xbe\x46\xbb\x14\x5b\x48\x4e\xe8\xba\x90\x05\xa1\x19\xd0\x34\xae\xc4\xce\xbe\xbc\x9a\xbf\x9c\x15\x7f\x5d\x57\xc5\x42\x10\x01\x27\x89\xe2\x48\x51\xb9\xda\x40\xd5\x87\x37\xc0\x15\x5f\x88\xad\x90\xdc\x10\x81\x42\x16\xa4\x31\x50\x39\x2f\x38\xad\xab\xb0\x53\xd8\xac\x94\x56\x6f\xc3\x84\xf4\x11\xa4\x50\xb3\xfa\xe9\xd3\x6f\x9f\x3e\xdf\x2f\xd0\xeb\x8f\x1f\x3f\x7e\xbc\xfe\xf6\xe9\xf7\xaf\x5f\x7f\xfc\x18\x7e\xfb\xec\xfb\xc7\xb3\x36\x79\x54\x43\xe8\x29\xc5\x31\x20\xc6\x11\x49\x9e\xe5\x12\x41\x66\xa0\xe8\xed\x1d\x8e\x93\x08\x5e\xa3\x27\x3b\xc3\x7c\xa2\x73\xba\xc4\x02\x2e\xb0\xdc\xf4\xe5\x76\xd1\xca\x96\xa2\x8a\x94\xcd\x21\xc9\x6c\xec\x2c\x70\x42\x9e\x34\x74\x9d\x19\x7c\xcd\x20\x9c\xea\x2e\x0a\xfe\x42\x84\xd4\x29\x04\x8c\x8a\xb4\x41\xa2\xc1\xdc\x19\x8a\x88\x90\x4a\x49\xef\xce\xdf\xbd\x45\x4a\x52\x81\x70\x10\x40\x22\x21\x44\xcb\x6d\xc5\xec\x4e\x3c\x1c\x45\xef\x57\x9a\xb5\x7d\xae\xfe\xe5\xe4\x31\x86\x90\xe0\xab\x6d\x02\x3b\x2e\xcb\x11\x90\xd9\xa8\xc6\x77\xc2\x59\x98\x06\xfd\xf9\x2e\x78\x45\x01\xa6\xa8\xa0\x71\x5c\xbe\x33\xa7\xd2\xd9\x5f\x95\xef\xd9\xd5\xac\xfd\xdc\x5d\xbf\x5e\xb8\xd1\x3e\xc7\x31\x48\xe0\x5e\x4c\x14\x65\xdf\xb8\xa8\x71\x10\x09\xa3\xc2\xc7\x02\xcb\xa2\x4e\x5a\x02\x82\x94\x13\xb9\xf5\x30\xe6\xb2\xa4\xb5\xfe\x9b\x3e\x7a\xb2\x55\xd2\xa8\x4a\xbc\x16\xb6\x71\x8e\x39\xc7\xdb\x9d\xe1\x10\x09\x71\xbd\x9c\xb3\x41\x89\xd7\xa5\x89\xdc\x57\xb5\x53\x4a\xfe\x4c\xe1\xbc\xa0\x21\x79\x0a\x1a\x0f\x70\xa7\x5c\x08\x8e\xde\xb0\xc0\x43\x24\xad\x74\x63\x0e\xb1\xd9\x90\xe1\xb0\x2d\x13\xa7\x6d\x78\xfd\x04\x14\x38\x8e\x90\xaa\xce\x63\xac\x3e\x23\xbc\x64\xa9\xb4\xf8\x03\x63\xde\xcd\xbe\x16\x13\x4a\x55\xac\x82\x12\xc6\xac\xd4\x35\xf7\x22\xd4\x3a\xff\x22\x73\x0e\x6e\x51\xa0\x75\x1e\xae\x8f\xe8\x5d\xc7\x59\xe6\xe4\x9a\x18\x8d\xd6\x1c\x53\x84\x4d\xb7\x67\x28\x37\x09\x84\x69\x88\x12\x0e\x01\x11\x80\x32\xa2\xf5\xb9\xaa\xc6\xd9\x73\x53\xab\xfb\xb4\x2e\x20\xc6\x54\x92\xa0\x9a\xf3\x69\x1a\x2f\x81\x77\x37\xae\x53\x1a\xce\x40\xc4\xa8\x82\x1c\xb5\xef\xda\x24\x7d\xb9\x61\x69\x14\xa2\x25\xa0\x90\xac\x56\xc0\x81\x4a\xb4\xe2\x2c\xce\x4a\x64\x7a\x9a\x23\xf4\x13\x91\x3f\xa7\x4b\xf4\x63\x84\x6f\x18\x87\x10\xbd\xc3\xfc\x8f\x90\xdd\x52\x44\x04\xc2\x51\xc4\x6e\x21\x74\x48\x21\x81\xc7\xe2\xfd\xea\x12\xf8\x0d\x09\xf6\xe9\x47\x35\xaf\x67\xc4\x14\xf7\x22\x27\x97\xe1\xe2\x76\x2d\x06\x8c\x4a\x1c\x48\x3f\x73\x2d\x0b\x5b
\x29\x45\x24\x00\x2a\x0c\x11\xec\x94\xca\xc2\xa6\xc1\x37\x21\x43\x83\x3b\x5f\x97\xf1\x43\x5e\x53\x73\x19\xa5\x36\xd8\x2d\x05\x2e\x34\x0b\xeb\x39\xfc\x1d\x63\x51\x01\xbd\x3d\xbb\x90\x84\x40\x25\x59\x6d\x09\x5d\xa3\x1c\x37\xe6\x5c\x16\x9a\x40\x09\x70\xc1\xe8\x82\xf1\x35\xa6\xe4\xdf\x99\x5c\x8e\x9e\x4d\x79\xb4\x27\x2f\xbf\x7e\xf8\x05\x25\x8c\x50\xa9\x98\x29\x90\x62\x60\xea\x75\xae\x13\xca\xbf\x2b\x1a\x29\x27\x76\xd6\x20\xc6\x64\x5f\xe6\x32\x1a\x08\x87\x21\x07\x21\xbc\xb4\xe4\xe0\x32\x67\xa6\xcd\xf3\x1e\xcd\xd9\x6b\xb6\x6f\x8e\x27\xa7\xed\xdb\xe7\xbc\xcc\x1a\x07\xce\x6f\x87\x33\xf0\xba\x51\x17\x22\x66\x78\x79\x8e\xce\xe5\x13\x81\x80\x06\x2c\xe5\x78\x0d\xa1\xb2\xb8\x54\xa8\x79\x09\xbd\xbf\x3c\x47\x01\x8b\x13\x2c\xc9\x32\xaa\xaa\x1d\xd5\xee\xab\x36\xbd\x6c\xfd\x54\x6c\xc8\x08\x01\x3c\xbd\xe7\x07\x88\xb0\x24\x37\x79\x90\x28\x4a\x1d\x10\x1a\x92\x1b\x12\xa6\x38\x42\x40\xc3\x4c\x43\x62\x8e\xae\x36\xb0\x45\x71\x2a\xa4\x9a\x23\x79\x59\xb1\xa8\xf2\xa4\x0c\x60\x9f\xcc\x8d\x40\xf5\x80\xca\xa8\x99\xc3\xa7\x85\x1f\x31\x25\xa9\x82\xc5\x6d\xbd\xd8\x36\x76\x7c\x02\x28\x97\xf6\x1d\x74\x3b\x11\x7e\x91\xae\x32\xf8\x6c\xf4\xe6\x7b\x9a\xa5\x1f\x62\xc6\x21\xcf\x9a\xe5\xed\x8b\x02\xf3\x2c\x33\x33\xdf\x00\xca\xc9\x09\xb4\x04\xf5\xa5\x08\xd7\xc3\x02\x18\x66\xf1\xab\x1e\x83\x37\x4c\xcd\x12\xc1\x1d\x50\xf6\xaa\xbd\xfe\xe2\x73\x48\x38\x08\xa0\x32\x9b\x18\x44\x86\x0b\x6a\xc1\xaa\x26\x96\x2d\x96\x3c\xa0\x54\x65\x73\xe3\x08\xb5\x8b\x99\xbd\x82\xbc\x9e\xc2\xe8\x53\x46\x83\x3f\x33\x54\x2b\x5b\xad\x92\x79\xd9\x8f\x5d\x93\x98\xf2\xe6\xc6\x1c\xe6\x9a\x9e\xfc\x43\x82\x31\x66\x8e\x53\x77\xfe\x90\xe7\xf3\xf6\xe9\x62\x23\x3f\x10\x93\x18\xae\x72\x1a\x9d\xf9\x48\xcb\xcc\x5a\x65\xc7\x4a\x04\xf0\xf3\xd5\xd5\x05\x8a\x41\x08\xbc\x86\x86\x43\x51\x6c\xe0\x46\x57\xf6\x44\x40\x0d\xfb\xff\xa2\xc3\x7c\x3d\x39\x84\xdc\x09\x22\x64\x4f\x12\xd9\xba\xaa\xf6\xe3\xbd\x56\xdd\x91\x25\x6a\x14\x9c\x89\x34\x8e\x31\xdf\xee\x15\x7e\x2f\x39\x81\x15\x2a\x28\x95\x66\x51\xf5\xfd\x83\xc5\xfe\x15\x07\xcf\xf7\x08\xee\x1d\x8e\xb6\xc5\x52\xcc\x8c\x5a\x93\x66\xc5\xd8\x79\x38\x46\xd6\xa7\x88\x37\xc9\x2e\xe3\xd2\xa5\x7b\x4b\x3a\xdc\xa1\xdc\x9e\x29\xf1\x8c\x8a\x99\x16\x47\x8d\xd4\x78\x8b\xf6\x1c\xe9\x71\x54\x1b\x69\xa8\x4a\x93\x37\xe5\xb2\x2c\x4f\x0c\x92\xab\xa0\x73\x32\x72\x59\xd3\xf0\x2d\x8d\xed\xca\x37\x16\x19\x9e\xdb\x1c\xab\x17\x49\xc3\x0f\x37\xdc\x88\xb1\xb4\xd4\x42\xcb\x58\x5e\x6a\x52\x0b\x15\x10\x0a\xb0\x04\xe7\xf8\x58\x32\x16\x01\xa6\xcd\x01\xb2\xc2\x69\x24\x35\x38\x6f\x30\x6a\xae\x1b\xb4\x71\xaa\xad\x1d\xa0\xd6\x20\x2d\x8b\x3c\xc6\x82\x62\x27\x34\x6d\x15\x84\x7b\x43\xb1\x35\x78\xa6\x24\x77\x28\xc1\x6e\xfc\xe9\x48\x74\xf4\x15\xe3\xe1\x84\x42\x88\x40\x7a\x26\x49\x3b\x48\xb1\xa4\x19\x8e\x0c\xa7\xb5\x01\x6c\x0c\x97\x61\x8a\xc2\x32\xd8\x8c\x44\x69\x24\xbf\x65\x1d\x74\xd6\xe5\x44\xef\xec\x48\x5e\xb7\x8a\xa3\x29\x8e\x41\x64\xce\x1f\x88\xdc\x00\x47\x4b\x40\x98\x6e\xd1\x0d\x8e\x48\x98\x63\x5c\x21\xb1\x4c\x05\x0a\x58\x98\xc5\x6d\x4f\x0a\x77\x53\x4f\x8b\xc4\x44\x1f\xb2\xdf\x8d\x3b\xea\x9f\xfe\xf6\xf2\xc5\xff\x5d\x7f\xfe\x9f\xfb\x67\x8f\xff\xf3\xe9\x69\xd1\xfe\xb3\xc7\xfd\x3c\xf8\x3f\x71\x94\x82\x23\xd1\x72\x00\xb7\x42\x99\x6c\xc0\x60\x7b\x0f\x79\xea\xa8\x53\x4b\x56\x31\xfa\x0b\x52\x9f\x96\xdb\xcd\x2f\xd7\x67\xcd\x04\x19\x85\xfe\xdb\x02\x9a\x59\x03\xad\x63\x3c\xea\xff\x2e\x18\xfd\x00\xd9\xe2\x56\x60\x59\x93\xb9\xb6\xb2\x3e\x3c\x2c\xab\x0f\xa7\xb2\x89\x43\xc7\xf6\xd5\xce\x2e\xad\xa6\xa9\xed\xa6\xc6\x5a\xb4\xa6\x67\xdf\x8c\x26\x7b\x50\x5a\x91\x08\x2e\x6d\xd4\x3a\xc1\x9d\xf2\xdb\xde\x1e\xb2\x2c\x6c\xa5\x64\xc9\x16\xb4\x90\xaa\x4a\xb7\x0c\xde\x13\xc3\x2a\x9a\x11\x9b\x7a\x1b\x3
d\x95\x97\x37\x31\x6b\x69\x7e\x34\xc0\x67\x1f\x66\x19\x49\xef\xf1\x25\xf5\xac\x0e\xea\x0a\x28\x8d\x4d\x7e\xd9\x57\x4b\x49\xe5\xc6\xb3\x25\xfd\xe6\x57\x42\x25\xac\xcd\xcf\x36\x74\x8e\xca\x24\x47\xe7\x80\xa8\x92\x72\xbd\x3d\x84\x2d\x65\xe2\x82\x1a\x9c\xc4\x44\x92\x1b\x10\x79\x8a\xc4\x4a\x2f\x60\x51\x04\x81\xaa\xf0\xa3\x95\x27\xd7\x12\x7b\xa3\x96\x03\x45\x96\xc1\x8a\x07\xc9\xb2\xb0\x95\x52\x8c\xef\x48\x9c\xc6\x7e\x94\xca\xc2\x0e\x07\x12\x44\xa9\x20\x37\xf0\xae\x0f\x49\xa3\x96\x9d\x4b\x42\x7b\x70\x59\x14\xee\xe0\xb2\x0f\x49\xa3\x96\x4b\x97\xbf\x00\x5d\x4b\x4f\xfc\xbb\x2b\xee\x92\xb9\x17\xb5\xaa\xb8\x0b\x97\x17\x9b\x43\xfd\xd6\xc2\xb2\xc2\x2e\x29\xcf\xfd\x87\x4a\x55\xda\x25\x63\x1f\x5a\x65\x69\x2b\x2d\x3d\x67\xe9\x41\xae\x5e\xc1\x6e\x2b\xd4\xdb\x3e\xa8\xd3\x26\xd2\x48\x92\x24\xca\x61\x86\x8f\x8c\xbb\xf2\x8e\x91\xdf\x1f\x06\x19\x33\xf3\x03\x4d\xba\xcd\xe2\x2d\xfb\x69\xe9\x16\x15\x13\xd5\x56\x85\x4e\x3c\x5b\x8a\xbf\x25\x72\x83\xee\x5e\x20\x22\xf2\xc8\xaa\x7b\xdb\x8e\xe4\x29\x58\xca\x38\xb7\x3f\x2e\x59\xb8\xbd\xa8\x16\x16\xf7\xdb\x79\x51\x9f\x5a\xb4\x8d\x87\x3a\x6e\xbc\x3e\xc5\xb4\xcd\x58\xf9\xf5\x3c\xb9\x6f\x49\xaf\x57\xc1\xfa\x1c\x5d\x6d\x88\x8a\x8b\xd3\x28\xcc\xb7\xef\x10\x8a\x4a\x74\xa9\x4a\xa7\x62\xbf\x0d\x76\x23\x6f\x59\xd9\x31\xee\x40\x11\xfb\x28\xec\x8d\x22\x1c\x13\x5a\x64\x94\x23\x16\x60\xbb\xd2\x7c\xa0\x98\xb2\xe5\x6e\xbc\x54\x33\xe0\x3e\xb9\x52\x17\xdb\xb7\x1b\xc8\x12\x20\x8c\x23\xca\x64\x7e\x78\xa3\x62\x5b\x75\x56\xd9\x9e\x2a\x91\x27\xb0\x70\x34\x1f\x90\x89\xb5\x86\x73\x7e\x71\xda\x1e\xfb\x3a\x72\xdc\x5e\x79\x88\xcb\x74\x79\xd9\x64\xe4\xd4\xc2\x9e\xce\xb1\xfe\x85\x5a\xc0\xe9\x0c\x34\x3d\xd0\x43\xce\xa1\x36\x39\xd5\xa1\x4e\xf5\xf8\xb1\xa9\x23\x08\x75\x84\xac\x53\x6c\x3a\xc5\xa6\x53\x6c\x3a\xc5\xa6\x7f\xc5\xd8\xf4\x51\xfd\xff\x25\x4e\xfa\x33\x05\xbe\x9d\x60\xd2\x04\x93\x6a\x5f\x33\x9b\x98\x50\xd2\xe1\x50\x52\xc6\xcc\xdb\x38\x91\xdb\xe6\xaa\xa2\x8f\xa1\x6a\xa6\xd4\xc6\x56\xd6\x8c\x40\x02\x68\x48\xe8\x1a\xe1\x9a\xd9\x2e\xb7\x05\xc3\x34\xda\x2a\xbb\xcd\x12\x36\x98\x22\x50\x4c\xa1\x1b\xc5\xd5\x84\xf0\xbe\x24\x84\xf7\x2f\x22\x37\xef\x94\xd7\x9f\xa0\xde\x04\xf5\x26\xa8\x37\x41\x3d\x64\x40\x3d\xe5\xf2\xde\x60\x89\x27\xb4\x37\xa1\xbd\xda\xd7\xd2\x2c\x26\xc0\x37\x01\x3e\x1b\xef\x5f\x06\xe0\x6b\x7c\x5c\x91\x08\x26\x10\x38\x81\xc0\x09\x04\x76\x4a\x3d\x81\xc0\xbf\x12\x08\x4c\xb0\xdc\x7c\x99\x00\xd0\x75\x70\xb4\xf8\x5a\x7c\xea\xde\x3e\x39\x08\x30\x5a\x27\x35\xed\xb4\x65\xad\x69\x74\x10\x88\x79\xe2\x30\x52\x19\xd6\x04\x21\xa7\x95\xd5\x0e\x03\xf8\xda\x20\xd7\x84\xb4\x26\xa4\x35\x21\xad\x09\x69\x21\x03\x69\x51\x46\xff\xff\x18\x9b\x54\xed\x87\x47\x06\x9d\x4e\x73\x6e\x9a\xb3\xa9\xce\x83\x5e\x4b\xc6\x71\x20\x45\xd7\x72\xf5\x40\x72\x0e\x34\x6c\xf4\x6c\xf3\xba\x5e\x4b\x97\x0e\x52\xb8\xbe\x8b\x79\xa0\x10\x86\xa1\x75\xb0\x6f\xec\xc8\xf4\x3d\x4d\x7b\x86\xc2\x02\x31\x12\x51\xbf\x07\x94\xad\x10\xd6\x2e\x79\xcf\xe9\x1c\xf5\x1e\x31\x23\x5c\x18\xfb\x9c\xfb\x70\xe0\x62\xbd\xf7\xb5\x94\xcf\xf3\xf6\xfa\xc5\x4e\x9c\x85\x76\x1d\xae\x37\xbc\xde\xa3\x41\xcb\x29\xd0\x5e\x70\x67\x50\x93\x6d\x98\xa8\xd3\x67\x0f\x68\xb1\xeb\x38\x47\x07\x10\x1b\xd2\xe2\x18\x68\x6d\x40\xbb\xa3\x40\xba\x21\xf2\x8e\x81\xfb\xf6\x92\x77\x2f\x70\xe8\xdb\xb2\x36\xbf\x30\x91\xc5\x21\xe7\x45\xcc\x34\x0c\x48\x8e\xd0\xf2\x9b\x7c\x3c\xbd\x1c\x04\x3e\x07\xe8\x7c\x2f\x84\x7a\x48\x4d\x1f\xba\xe1\x76\x45\x7b\x60\xe0\x01\xca\xee\x04\xca\x31\xbe\x73\x5f\xa3\x70\x0c\xad\x1f\xa5\xf5\x76\xd5\xbb\xd2\x7e\xfb\x30\x90\xcf\xfa\x67\x7a\xe6\xc3\x37\x42\x19\xe2\xc9\x9c\x61\x4c\xe7\xd1\x77\x55\x86\x6e\x8f\x7b\x85\x42\x33\xa3\xaa\x57\xae\xfd\xd5\xcc\x9c\x56\x68\xe2\xde\x0e\x
a8\x2c\xa9\xb0\x7d\xf0\x54\x2d\x80\xf2\x48\x39\x3d\x98\x1a\x6d\x0b\x9d\xba\x53\xfb\xce\xf8\xd1\x7e\xbb\x60\x4f\x06\xf5\xce\xda\xab\xeb\xca\xcb\xd5\xac\x20\xda\x72\x3b\xa2\x4b\x38\xd7\xb5\x89\xbe\x42\xd9\xb9\x73\xc4\x0c\x6d\xb7\xd9\xf8\x8d\xbd\x3e\x9c\xf5\x53\x68\x48\x14\x36\x8f\x09\xc5\x92\xf1\x21\xd1\x09\x07\x1c\xbe\xa7\x91\xf3\x6a\xc8\xc1\x57\xb0\xdd\xc5\xc6\x1d\xad\x76\x1d\xa8\x82\x0e\x4c\x38\xfe\xa5\x8c\xc5\x0a\x40\x5d\xa1\xbb\x98\xd1\xfb\x74\x61\xed\x1a\x98\xaf\x3c\x8c\x1e\xe3\xc2\x92\x29\x74\x3e\x99\xd0\xf9\x41\x50\xd0\x38\x4b\x57\x7e\x5b\x7a\x0e\xe6\xce\x4e\xd7\x19\x35\x57\xbb\x3c\x3c\xd2\x5e\x4f\x4b\x4c\xf7\x0f\x4d\x2b\x91\x5d\x94\xa6\x95\xc8\x69\x25\x72\x5a\x89\x7c\xb8\x95\xc8\x07\x80\x8c\xda\x9c\x64\x7b\xb7\x71\xdf\x57\x12\x4b\x9a\x1f\x72\x0c\x13\x03\xad\x3c\xd5\x4e\xde\x8e\x57\x13\x6d\x34\x86\xcf\x97\xe6\xa4\x68\xc4\xb0\xf6\xc9\xc2\xeb\x8d\x0b\xd7\xcd\xfe\xba\xa6\xf5\x30\xeb\x30\x33\xbe\xc7\x56\x27\xab\x08\xd9\x6d\xbb\x09\xee\x7c\x2d\xcf\xee\x87\x38\xac\xc8\xdd\x90\x9a\x58\x4a\x4e\x96\xa9\x79\x79\xf3\xde\x20\xf0\x96\xe3\x24\x19\xeb\xba\xf2\x53\x19\xab\x12\xaf\x47\xb3\xa0\x3e\xef\x9b\x8d\x6d\x6d\x7b\xde\x3b\x3b\x1a\xc0\x3f\x95\x7e\xed\x78\xfb\x76\xb8\xaf\xb3\xdd\xc5\xeb\x95\xed\x5a\x62\x41\x82\xb3\x54\x6e\x80\x4a\x92\x6f\x36\xbd\x34\xae\xde\x6f\xa4\xc0\xbc\x08\xe3\x84\xfc\x1d\xb6\xe3\xd0\x62\x38\x95\x9b\x57\xe7\x71\x12\x91\x80\xc8\x31\x69\x5e\x60\x21\x6e\x19\x0f\xc7\xa4\x79\x96\x28\x3e\x47\x54\x65\x41\x36\x08\x40\x88\x1f\x58\x08\x56\xaa\xd5\xbf\xaf\xad\x96\xd7\xd6\xcf\x87\xf5\x34\x0f\x71\x93\x6e\x26\xed\x98\x5b\x9f\x4f\xcf\x95\x34\xc6\xd7\x11\xfa\xb0\x81\x22\x1a\xdb\xdf\x8e\xdc\xc3\xb9\xf8\xdd\x5d\x3c\x74\xe6\xea\xb7\x8b\xbf\xf5\x6e\xb3\x46\x2e\x64\xf4\xab\x3c\x4e\xcf\x36\x1d\xfe\xfa\xb8\x36\xba\x8a\xd8\xad\xf6\xc6\x41\x2a\x37\x8c\x17\x0f\xda\xfe\xda\xe7\x65\xbc\x71\x2c\x36\x57\x8a\x47\x12\x4c\xf1\xbd\x77\x6b\xa4\x50\x7e\x77\x7b\x22\x60\x89\xef\xcd\xf5\xb9\x0c\x97\x79\x0d\x2b\x35\x43\xcb\x3d\x24\xf1\x78\xfc\xf8\xcb\x1f\x15\x06\xe2\x78\xd8\x51\x21\xd9\x1f\xf0\xf5\x8f\x86\xa4\x50\xfa\xb1\x47\x43\xa5\xdd\x69\x14\xe8\xa3\xc0\x86\x91\xa7\x81\x50\xb4\x7c\xc0\x81\x80\x77\x7a\x9f\xc6\xc2\xa9\x8c\x05\x33\xb0\x3b\x31\xa4\xf4\xd7\x1b\x26\x55\x97\x7c\x65\xf8\x69\x1a\x84\x8e\x41\x78\xd9\xec\xc5\x11\x16\x1e\x74\x91\xf5\x56\xf5\x57\x49\x47\x5c\x92\xa9\x1e\x99\x36\xf4\xdb\xb1\x0e\xd3\x78\x02\xb0\x9b\x25\xcb\xe9\xe9\x1d\x0d\x44\x01\x42\x08\x91\x64\xd9\xdd\x37\x08\x17\xef\xf9\xe5\x0f\xbd\x46\x91\xf5\xf9\x89\x92\x37\xdd\x89\x59\x44\x1f\x9c\xee\x34\x1e\xbe\x47\x83\x32\x72\x8e\x37\xdf\xac\x69\x38\xef\x75\xb0\xda\xdb\xac\x83\x94\x2f\x39\xa6\x62\x05\x1c\x25\x9c\x49\x16\xb0\xa8\x3c\xc7\x7e\x76\x71\x3e\x6f\xb5\x24\xe7\xe8\xb7\xb9\xc7\x6c\x43\x92\xee\x21\xd4\x17\xa1\x7f\xba\x35\xfe\xae\x39\xbc\xde\xba\x69\xd9\x8e\xe1\x62\xde\x64\x7d\x16\x88\x1b\xed\x29\x11\xfd\x4f\xa9\xff\x99\x90\xc4\xf6\xf4\xf9\x6e\xe9\x28\x23\xd7\xca\xe5\xee\xee\x9f\x63\xb1\x5b\xfb\x10\xd7\x2f\x1d\xf2\xe3\xbf\xb9\xb5\x6f\xa4\x6d\x7d\x25\x79\xfb\x24\x31\xea\x56\xbe\x5d\x53\xcd\x2d\x36\xa3\x6d\xdf\xab\x1c\xb8\x6d\x6f\xc0\x98\xa7\xdd\xaa\x86\x8c\x1d\x39\xa3\x9d\x70\x2b\x9b\x68\xd9\xfd\x33\xfe\xa9\xb6\x4a\x2e\x63\x0f\xcf\x68\x27\xd9\x4c\xb9\x46\x6d\xcb\xbe\xa1\xa8\xd6\x5f\xc6\xd6\x9f\xf1\x4f\xf4\xd4\xb4\x78\xd0\xd6\xf4\x13\x3c\x3b\xac\xd0\xdc\x90\x34\xda\xc9\xb4\x9a\x1a\x8d\xbd\x93\x87\xd4\xe2\x21\x1b\xb3\x2b\xd1\xbe\xe7\x69\xd4\x53\x67\xd5\x40\xa0\xe3\x19\x3f\x6d\x1a\xbc\x0e\x86\x3c\x10\xb4\x3d\x2a\xcd\x78\x32\xe6\xab\xbd\x36\xc9\xf4\x3a\x58\xae\xc3\xf4\x47\xea\xbf\xfb\x47\xff\x0d\x00\x00\xff\xff\xd2\x32\x5a\x28\x38\x9d\x00\x00") - -func 
v2SchemaJsonBytes() ([]byte, error) { - return bindataRead( - _v2SchemaJson, - "v2/schema.json", - ) -} - -func v2SchemaJson() (*asset, error) { - bytes, err := v2SchemaJsonBytes() - if err != nil { - return nil, err - } - - info := bindataFileInfo{name: "v2/schema.json", size: 40248, mode: os.FileMode(0640), modTime: time.Unix(1568964748, 0)} - a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xab, 0x88, 0x5e, 0xf, 0xbf, 0x17, 0x74, 0x0, 0xb2, 0x5a, 0x7f, 0xbc, 0x58, 0xcd, 0xc, 0x25, 0x73, 0xd5, 0x29, 0x1c, 0x7a, 0xd0, 0xce, 0x79, 0xd4, 0x89, 0x31, 0x27, 0x90, 0xf2, 0xff, 0xe6}} - return a, nil -} - -// Asset loads and returns the asset for the given name. -// It returns an error if the asset could not be found or -// could not be loaded. -func Asset(name string) ([]byte, error) { - canonicalName := strings.Replace(name, "\\", "/", -1) - if f, ok := _bindata[canonicalName]; ok { - a, err := f() - if err != nil { - return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err) - } - return a.bytes, nil - } - return nil, fmt.Errorf("Asset %s not found", name) -} - -// AssetString returns the asset contents as a string (instead of a []byte). -func AssetString(name string) (string, error) { - data, err := Asset(name) - return string(data), err -} - -// MustAsset is like Asset but panics when Asset would return an error. -// It simplifies safe initialization of global variables. -func MustAsset(name string) []byte { - a, err := Asset(name) - if err != nil { - panic("asset: Asset(" + name + "): " + err.Error()) - } - - return a -} - -// MustAssetString is like AssetString but panics when Asset would return an -// error. It simplifies safe initialization of global variables. -func MustAssetString(name string) string { - return string(MustAsset(name)) -} - -// AssetInfo loads and returns the asset info for the given name. -// It returns an error if the asset could not be found or -// could not be loaded. -func AssetInfo(name string) (os.FileInfo, error) { - canonicalName := strings.Replace(name, "\\", "/", -1) - if f, ok := _bindata[canonicalName]; ok { - a, err := f() - if err != nil { - return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err) - } - return a.info, nil - } - return nil, fmt.Errorf("AssetInfo %s not found", name) -} - -// AssetDigest returns the digest of the file with the given name. It returns an -// error if the asset could not be found or the digest could not be loaded. -func AssetDigest(name string) ([sha256.Size]byte, error) { - canonicalName := strings.Replace(name, "\\", "/", -1) - if f, ok := _bindata[canonicalName]; ok { - a, err := f() - if err != nil { - return [sha256.Size]byte{}, fmt.Errorf("AssetDigest %s can't read by error: %v", name, err) - } - return a.digest, nil - } - return [sha256.Size]byte{}, fmt.Errorf("AssetDigest %s not found", name) -} - -// Digests returns a map of all known files and their checksums. -func Digests() (map[string][sha256.Size]byte, error) { - mp := make(map[string][sha256.Size]byte, len(_bindata)) - for name := range _bindata { - a, err := _bindata[name]() - if err != nil { - return nil, err - } - mp[name] = a.digest - } - return mp, nil -} - -// AssetNames returns the names of the assets. -func AssetNames() []string { - names := make([]string, 0, len(_bindata)) - for name := range _bindata { - names = append(names, name) - } - return names -} - -// _bindata is a table, holding each asset generator, mapped to its name. 
-var _bindata = map[string]func() (*asset, error){ - "jsonschema-draft-04.json": jsonschemaDraft04Json, - - "v2/schema.json": v2SchemaJson, -} - -// AssetDir returns the file names below a certain -// directory embedded in the file by go-bindata. -// For example if you run go-bindata on data/... and data contains the -// following hierarchy: -// data/ -// foo.txt -// img/ -// a.png -// b.png -// then AssetDir("data") would return []string{"foo.txt", "img"}, -// AssetDir("data/img") would return []string{"a.png", "b.png"}, -// AssetDir("foo.txt") and AssetDir("notexist") would return an error, and -// AssetDir("") will return []string{"data"}. -func AssetDir(name string) ([]string, error) { - node := _bintree - if len(name) != 0 { - canonicalName := strings.Replace(name, "\\", "/", -1) - pathList := strings.Split(canonicalName, "/") - for _, p := range pathList { - node = node.Children[p] - if node == nil { - return nil, fmt.Errorf("Asset %s not found", name) - } - } - } - if node.Func != nil { - return nil, fmt.Errorf("Asset %s not found", name) - } - rv := make([]string, 0, len(node.Children)) - for childName := range node.Children { - rv = append(rv, childName) - } - return rv, nil -} - -type bintree struct { - Func func() (*asset, error) - Children map[string]*bintree -} - -var _bintree = &bintree{nil, map[string]*bintree{ - "jsonschema-draft-04.json": {jsonschemaDraft04Json, map[string]*bintree{}}, - "v2": {nil, map[string]*bintree{ - "schema.json": {v2SchemaJson, map[string]*bintree{}}, - }}, -}} - -// RestoreAsset restores an asset under the given directory. -func RestoreAsset(dir, name string) error { - data, err := Asset(name) - if err != nil { - return err - } - info, err := AssetInfo(name) - if err != nil { - return err - } - err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755)) - if err != nil { - return err - } - err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode()) - if err != nil { - return err - } - return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime()) -} - -// RestoreAssets restores an asset under the given directory recursively. -func RestoreAssets(dir, name string) error { - children, err := AssetDir(name) - // File - if err != nil { - return RestoreAsset(dir, name) - } - // Dir - for _, child := range children { - err = RestoreAssets(dir, filepath.Join(name, child)) - if err != nil { - return err - } - } - return nil -} - -func _filePath(dir, name string) string { - canonicalName := strings.Replace(name, "\\", "/", -1) - return filepath.Join(append([]string{dir}, strings.Split(canonicalName, "/")...)...) -} diff --git a/vendor/github.com/go-openapi/spec/embed.go b/vendor/github.com/go-openapi/spec/embed.go new file mode 100644 index 000000000..1f4284750 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/embed.go @@ -0,0 +1,17 @@ +package spec + +import ( + "embed" + "path" +) + +//go:embed schemas/*.json schemas/*/*.json +var assets embed.FS + +func jsonschemaDraft04JSONBytes() ([]byte, error) { + return assets.ReadFile(path.Join("schemas", "jsonschema-draft-04.json")) +} + +func v2SchemaJSONBytes() ([]byte, error) { + return assets.ReadFile(path.Join("schemas", "v2", "schema.json")) +} diff --git a/vendor/github.com/go-openapi/spec/expander.go b/vendor/github.com/go-openapi/spec/expander.go index d4ea889d4..b81a5699a 100644 --- a/vendor/github.com/go-openapi/spec/expander.go +++ b/vendor/github.com/go-openapi/spec/expander.go @@ -27,7 +27,6 @@ // all relative $ref's will be resolved from there. 
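// A hedged sketch (assumption, not part of the vendored file): with the generated bindata.go
// removed, the embedded schemas are now read through the go:embed-backed helpers introduced
// in embed.go above. The function name is illustrative.
func exampleLoadEmbeddedSchemas() (draft04, v2 []byte, err error) {
	if draft04, err = jsonschemaDraft04JSONBytes(); err != nil {
		return nil, nil, err
	}
	if v2, err = v2SchemaJSONBytes(); err != nil {
		return nil, nil, err
	}
	return draft04, v2, nil
}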
// // PathLoader injects a document loading method. By default, this resolves to the function provided by the SpecLoader package variable. -// type ExpandOptions struct { RelativeBase string // the path to the root document to expand. This is a file, not a directory SkipSchemas bool // do not expand schemas, just paths, parameters and responses @@ -58,7 +57,7 @@ func ExpandSpec(spec *Swagger, options *ExpandOptions) error { if !options.SkipSchemas { for key, definition := range spec.Definitions { parentRefs := make([]string, 0, 10) - parentRefs = append(parentRefs, fmt.Sprintf("#/definitions/%s", key)) + parentRefs = append(parentRefs, "#/definitions/"+key) def, err := expandSchema(definition, parentRefs, resolver, specBasePath) if resolver.shouldStopOnError(err) { @@ -103,15 +102,21 @@ func ExpandSpec(spec *Swagger, options *ExpandOptions) error { // baseForRoot loads in the cache the root document and produces a fake ".root" base path entry // for further $ref resolution -// -// Setting the cache is optional and this parameter may safely be left to nil. func baseForRoot(root interface{}, cache ResolutionCache) string { - if root == nil { - return "" - } - // cache the root document to resolve $ref's normalizedBase := normalizeBase(rootBase) + + if root == nil { + // ensure that we never leave a nil root: always cache the root base pseudo-document + cachedRoot, found := cache.Get(normalizedBase) + if found && cachedRoot != nil { + // the cache is already preloaded with a root + return normalizedBase + } + + root = map[string]interface{}{} + } + cache.Set(normalizedBase, root) return normalizedBase @@ -208,7 +213,19 @@ func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, ba } if target.Ref.String() != "" { - return expandSchemaRef(target, parentRefs, resolver, basePath) + if !resolver.options.SkipSchemas { + return expandSchemaRef(target, parentRefs, resolver, basePath) + } + + // when "expand" with SkipSchema, we just rebase the existing $ref without replacing + // the full schema. 
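// A hedged sketch (assumption, not part of the vendored file) of the caller-side setup that
// reaches the SkipSchemas rebasing branch described in the comment above; the relative base
// path is illustrative.
func exampleExpandWithSkipSchemas(doc *Swagger) error {
	return ExpandSpec(doc, &ExpandOptions{
		RelativeBase: "./swagger.json", // root document that relative $refs are resolved against
		SkipSchemas:  true,             // keep schema $refs in place, only rebase them
	})
}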
+ rebasedRef, err := NewRef(normalizeURI(target.Ref.String(), basePath)) + if err != nil { + return nil, err + } + target.Ref = denormalizeRef(&rebasedRef, resolver.context.basePath, resolver.context.rootID) + + return &target, nil } for k := range target.Definitions { @@ -520,21 +537,25 @@ func getRefAndSchema(input interface{}) (*Ref, *Schema, error) { } func expandParameterOrResponse(input interface{}, resolver *schemaLoader, basePath string) error { - ref, _, err := getRefAndSchema(input) + ref, sch, err := getRefAndSchema(input) if err != nil { return err } - if ref == nil { + if ref == nil && sch == nil { // nothing to do return nil } parentRefs := make([]string, 0, 10) - if err = resolver.deref(input, parentRefs, basePath); resolver.shouldStopOnError(err) { - return err + if ref != nil { + // dereference this $ref + if err = resolver.deref(input, parentRefs, basePath); resolver.shouldStopOnError(err) { + return err + } + + ref, sch, _ = getRefAndSchema(input) } - ref, sch, _ := getRefAndSchema(input) if ref.String() != "" { transitiveResolver := resolver.transitiveResolver(basePath, *ref) basePath = resolver.updateBasePath(transitiveResolver, basePath) @@ -546,6 +567,7 @@ func expandParameterOrResponse(input interface{}, resolver *schemaLoader, basePa if ref != nil { *ref = Ref{} } + return nil } @@ -555,38 +577,29 @@ func expandParameterOrResponse(input interface{}, resolver *schemaLoader, basePa return ern } - switch { - case resolver.isCircular(&rebasedRef, basePath, parentRefs...): + if resolver.isCircular(&rebasedRef, basePath, parentRefs...) { // this is a circular $ref: stop expansion if !resolver.options.AbsoluteCircularRef { sch.Ref = denormalizeRef(&rebasedRef, resolver.context.basePath, resolver.context.rootID) } else { sch.Ref = rebasedRef } - case !resolver.options.SkipSchemas: - // schema expanded to a $ref in another root - sch.Ref = rebasedRef - debugLog("rebased to: %s", sch.Ref.String()) - default: - // skip schema expansion but rebase $ref to schema - sch.Ref = denormalizeRef(&rebasedRef, resolver.context.basePath, resolver.context.rootID) } } + // $ref expansion or rebasing is performed by expandSchema below if ref != nil { *ref = Ref{} } // expand schema - if !resolver.options.SkipSchemas { - s, err := expandSchema(*sch, parentRefs, resolver, basePath) - if resolver.shouldStopOnError(err) { - return err - } - if s == nil { - // guard for when continuing on error - return nil - } + // yes, we do it even if options.SkipSchema is true: we have to go down that rabbit hole and rebase nested $ref) + s, err := expandSchema(*sch, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return err + } + + if s != nil { // guard for when continuing on error *sch = *s } diff --git a/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go b/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go index 2df072315..f19f1a8fb 100644 --- a/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go +++ b/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go @@ -40,5 +40,5 @@ func repairURI(in string) (*url.URL, string) { return u, "" } -func fixWindowsURI(u *url.URL, in string) { +func fixWindowsURI(_ *url.URL, _ string) { } diff --git a/vendor/github.com/go-openapi/spec/operation.go b/vendor/github.com/go-openapi/spec/operation.go index 995ce6acb..a69cca881 100644 --- a/vendor/github.com/go-openapi/spec/operation.go +++ b/vendor/github.com/go-openapi/spec/operation.go @@ -217,9 +217,12 @@ func (o *Operation) AddParam(param *Parameter) *Operation { for 
i, p := range o.Parameters { if p.Name == param.Name && p.In == param.In { - params := append(o.Parameters[:i], *param) + params := make([]Parameter, 0, len(o.Parameters)+1) + params = append(params, o.Parameters[:i]...) + params = append(params, *param) params = append(params, o.Parameters[i+1:]...) o.Parameters = params + return o } } diff --git a/vendor/github.com/go-openapi/spec/parameter.go b/vendor/github.com/go-openapi/spec/parameter.go index 2b2b89b67..bd4f1cdb0 100644 --- a/vendor/github.com/go-openapi/spec/parameter.go +++ b/vendor/github.com/go-openapi/spec/parameter.go @@ -84,27 +84,27 @@ type ParamProps struct { // Parameter a unique parameter is defined by a combination of a [name](#parameterName) and [location](#parameterIn). // // There are five possible parameter types. -// * Path - Used together with [Path Templating](#pathTemplating), where the parameter value is actually part -// of the operation's URL. This does not include the host or base path of the API. For example, in `/items/{itemId}`, -// the path parameter is `itemId`. -// * Query - Parameters that are appended to the URL. For example, in `/items?id=###`, the query parameter is `id`. -// * Header - Custom headers that are expected as part of the request. -// * Body - The payload that's appended to the HTTP request. Since there can only be one payload, there can only be -// _one_ body parameter. The name of the body parameter has no effect on the parameter itself and is used for -// documentation purposes only. Since Form parameters are also in the payload, body and form parameters cannot exist -// together for the same operation. -// * Form - Used to describe the payload of an HTTP request when either `application/x-www-form-urlencoded` or -// `multipart/form-data` are used as the content type of the request (in Swagger's definition, -// the [`consumes`](#operationConsumes) property of an operation). This is the only parameter type that can be used -// to send files, thus supporting the `file` type. Since form parameters are sent in the payload, they cannot be -// declared together with a body parameter for the same operation. Form parameters have a different format based on -// the content-type used (for further details, consult http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4). -// * `application/x-www-form-urlencoded` - Similar to the format of Query parameters but as a payload. -// For example, `foo=1&bar=swagger` - both `foo` and `bar` are form parameters. This is normally used for simple -// parameters that are being transferred. -// * `multipart/form-data` - each parameter takes a section in the payload with an internal header. -// For example, for the header `Content-Disposition: form-data; name="submit-name"` the name of the parameter is -// `submit-name`. This type of form parameters is more commonly used for file transfers. +// - Path - Used together with [Path Templating](#pathTemplating), where the parameter value is actually part +// of the operation's URL. This does not include the host or base path of the API. For example, in `/items/{itemId}`, +// the path parameter is `itemId`. +// - Query - Parameters that are appended to the URL. For example, in `/items?id=###`, the query parameter is `id`. +// - Header - Custom headers that are expected as part of the request. +// - Body - The payload that's appended to the HTTP request. Since there can only be one payload, there can only be +// _one_ body parameter. 
The name of the body parameter has no effect on the parameter itself and is used for +// documentation purposes only. Since Form parameters are also in the payload, body and form parameters cannot exist +// together for the same operation. +// - Form - Used to describe the payload of an HTTP request when either `application/x-www-form-urlencoded` or +// `multipart/form-data` are used as the content type of the request (in Swagger's definition, +// the [`consumes`](#operationConsumes) property of an operation). This is the only parameter type that can be used +// to send files, thus supporting the `file` type. Since form parameters are sent in the payload, they cannot be +// declared together with a body parameter for the same operation. Form parameters have a different format based on +// the content-type used (for further details, consult http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4). +// - `application/x-www-form-urlencoded` - Similar to the format of Query parameters but as a payload. +// For example, `foo=1&bar=swagger` - both `foo` and `bar` are form parameters. This is normally used for simple +// parameters that are being transferred. +// - `multipart/form-data` - each parameter takes a section in the payload with an internal header. +// For example, for the header `Content-Disposition: form-data; name="submit-name"` the name of the parameter is +// `submit-name`. This type of form parameters is more commonly used for file transfers. // // For more information: http://goo.gl/8us55a#parameterObject type Parameter struct { diff --git a/vendor/github.com/go-openapi/spec/schema_loader.go b/vendor/github.com/go-openapi/spec/schema_loader.go index b81175afd..0059b99ae 100644 --- a/vendor/github.com/go-openapi/spec/schema_loader.go +++ b/vendor/github.com/go-openapi/spec/schema_loader.go @@ -168,14 +168,7 @@ func (r *schemaLoader) load(refURL *url.URL) (interface{}, url.URL, bool, error) normalized := normalizeBase(pth) debugLog("loading doc from: %s", normalized) - unescaped, err := url.PathUnescape(normalized) - if err != nil { - return nil, url.URL{}, false, err - } - - u := url.URL{Path: unescaped} - - data, fromCache := r.cache.Get(u.RequestURI()) + data, fromCache := r.cache.Get(normalized) if fromCache { return data, toFetch, fromCache, nil } diff --git a/vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json b/vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json new file mode 100644 index 000000000..bcbb84743 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json @@ -0,0 +1,149 @@ +{ + "id": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "positiveInteger": { + "type": "integer", + "minimum": 0 + }, + "positiveIntegerDefault0": { + "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] + }, + "simpleTypes": { + "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "uniqueItems": true + } + }, + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "$schema": { + "type": "string" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": {}, + "multipleOf": { + "type": "number", + "minimum": 0, + "exclusiveMinimum": 
true + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { "$ref": "#/definitions/positiveInteger" }, + "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/positiveInteger" }, + "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { "$ref": "#/definitions/positiveInteger" }, + "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "required": { "$ref": "#/definitions/stringArray" }, + "additionalProperties": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "enum": { + "type": "array", + "minItems": 1, + "uniqueItems": true + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + "type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "format": { "type": "string" }, + "allOf": { "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "dependencies": { + "exclusiveMaximum": [ "maximum" ], + "exclusiveMinimum": [ "minimum" ] + }, + "default": {} +} diff --git a/vendor/github.com/go-openapi/spec/schemas/v2/schema.json b/vendor/github.com/go-openapi/spec/schemas/v2/schema.json new file mode 100644 index 000000000..ebe10ed32 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/schemas/v2/schema.json @@ -0,0 +1,1607 @@ +{ + "title": "A JSON Schema for Swagger 2.0 API.", + "id": "http://swagger.io/v2/schema.json#", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "required": [ + "swagger", + "info", + "paths" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "swagger": { + "type": "string", + "enum": [ + "2.0" + ], + "description": "The Swagger version of this document." + }, + "info": { + "$ref": "#/definitions/info" + }, + "host": { + "type": "string", + "pattern": "^[^{}/ :\\\\]+(?::\\d+)?$", + "description": "The host (name or ip) of the API. Example: 'swagger.io'" + }, + "basePath": { + "type": "string", + "pattern": "^/", + "description": "The base path to the API. Example: '/api'." 
+ }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "consumes": { + "description": "A list of MIME types accepted by the API.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "paths": { + "$ref": "#/definitions/paths" + }, + "definitions": { + "$ref": "#/definitions/definitions" + }, + "parameters": { + "$ref": "#/definitions/parameterDefinitions" + }, + "responses": { + "$ref": "#/definitions/responseDefinitions" + }, + "security": { + "$ref": "#/definitions/security" + }, + "securityDefinitions": { + "$ref": "#/definitions/securityDefinitions" + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/definitions/tag" + }, + "uniqueItems": true + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "definitions": { + "info": { + "type": "object", + "description": "General information about the API.", + "required": [ + "version", + "title" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "title": { + "type": "string", + "description": "A unique and precise title of the API." + }, + "version": { + "type": "string", + "description": "A semantic version number of the API." + }, + "description": { + "type": "string", + "description": "A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed." + }, + "termsOfService": { + "type": "string", + "description": "The terms of service for the API." + }, + "contact": { + "$ref": "#/definitions/contact" + }, + "license": { + "$ref": "#/definitions/license" + } + } + }, + "contact": { + "type": "object", + "description": "Contact information for the owners of the API.", + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The identifying name of the contact person/organization." + }, + "url": { + "type": "string", + "description": "The URL pointing to the contact information.", + "format": "uri" + }, + "email": { + "type": "string", + "description": "The email address of the contact person/organization.", + "format": "email" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "license": { + "type": "object", + "required": [ + "name" + ], + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The name of the license type. It's encouraged to use an OSI compatible license." + }, + "url": { + "type": "string", + "description": "The URL pointing to the license.", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "paths": { + "type": "object", + "description": "Relative paths to the individual endpoints. They must be relative to the 'basePath'.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + }, + "^/": { + "$ref": "#/definitions/pathItem" + } + }, + "additionalProperties": false + }, + "definitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "description": "One or more JSON objects describing the schemas being consumed and produced by the API." 
+ }, + "parameterDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/parameter" + }, + "description": "One or more JSON representations for parameters" + }, + "responseDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/response" + }, + "description": "One or more JSON representations for responses" + }, + "externalDocs": { + "type": "object", + "additionalProperties": false, + "description": "information about external documentation", + "required": [ + "url" + ], + "properties": { + "description": { + "type": "string" + }, + "url": { + "type": "string", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "examples": { + "type": "object", + "additionalProperties": true + }, + "mimeType": { + "type": "string", + "description": "The MIME type of the HTTP message." + }, + "operation": { + "type": "object", + "required": [ + "responses" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "tags": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "summary": { + "type": "string", + "description": "A brief summary of the operation." + }, + "description": { + "type": "string", + "description": "A longer description of the operation, GitHub Flavored Markdown is allowed." + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "operationId": { + "type": "string", + "description": "A unique identifier of the operation." + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "consumes": { + "description": "A list of MIME types the API can consume.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "parameters": { + "$ref": "#/definitions/parametersList" + }, + "responses": { + "$ref": "#/definitions/responses" + }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "deprecated": { + "type": "boolean", + "default": false + }, + "security": { + "$ref": "#/definitions/security" + } + } + }, + "pathItem": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "get": { + "$ref": "#/definitions/operation" + }, + "put": { + "$ref": "#/definitions/operation" + }, + "post": { + "$ref": "#/definitions/operation" + }, + "delete": { + "$ref": "#/definitions/operation" + }, + "options": { + "$ref": "#/definitions/operation" + }, + "head": { + "$ref": "#/definitions/operation" + }, + "patch": { + "$ref": "#/definitions/operation" + }, + "parameters": { + "$ref": "#/definitions/parametersList" + } + } + }, + "responses": { + "type": "object", + "description": "Response objects names can either be any valid HTTP status code or 'default'.", + "minProperties": 1, + "additionalProperties": false, + "patternProperties": { + "^([0-9]{3})$|^(default)$": { + "$ref": "#/definitions/responseValue" + }, + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "not": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + } + }, + "responseValue": { + "oneOf": [ + { + "$ref": "#/definitions/response" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "response": { + 
"type": "object", + "required": [ + "description" + ], + "properties": { + "description": { + "type": "string" + }, + "schema": { + "oneOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "$ref": "#/definitions/fileSchema" + } + ] + }, + "headers": { + "$ref": "#/definitions/headers" + }, + "examples": { + "$ref": "#/definitions/examples" + } + }, + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "headers": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/header" + } + }, + "header": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "vendorExtension": { + "description": "Any property starting with x- is valid.", + "additionalProperties": true, + "additionalItems": true + }, + "bodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "schema" + ], + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "body" + ] + }, + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "schema": { + "$ref": "#/definitions/schema" + } + }, + "additionalProperties": false + }, + "headerParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "header" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "queryParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "query" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "formDataParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "formData" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. 
GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array", + "file" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "pathParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "required" + ], + "properties": { + "required": { + "type": "boolean", + "enum": [ + true + ], + "description": "Determines whether or not this parameter is required or optional." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "path" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "nonBodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "type" + ], + "oneOf": [ + { + "$ref": "#/definitions/headerParameterSubSchema" + }, + { + "$ref": "#/definitions/formDataParameterSubSchema" + }, + { + "$ref": "#/definitions/queryParameterSubSchema" + }, + { + "$ref": "#/definitions/pathParameterSubSchema" + } + ] + }, + "parameter": { + "oneOf": [ + { + "$ref": "#/definitions/bodyParameter" + }, + { + "$ref": "#/definitions/nonBodyParameter" + } + ] + }, + "schema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "maxProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "enum": { + "$ref": 
"http://json-schema.org/draft-04/schema#/properties/enum" + }, + "additionalProperties": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "boolean" + } + ], + "default": {} + }, + "type": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/type" + }, + "items": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + } + ], + "default": {} + }, + "allOf": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + }, + "properties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "default": {} + }, + "discriminator": { + "type": "string" + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "xml": { + "$ref": "#/definitions/xml" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "fileSchema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "type" + ], + "properties": { + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "type": { + "type": "string", + "enum": [ + "file" + ] + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "primitivesItems": { + "type": "object", + "additionalProperties": false, + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "security": { + "type": "array", + "items": { + "$ref": "#/definitions/securityRequirement" + }, + "uniqueItems": true + }, + "securityRequirement": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } + }, + "xml": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "namespace": { + "type": "string" + }, + 
"prefix": { + "type": "string" + }, + "attribute": { + "type": "boolean", + "default": false + }, + "wrapped": { + "type": "boolean", + "default": false + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "tag": { + "type": "object", + "additionalProperties": false, + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "securityDefinitions": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "#/definitions/basicAuthenticationSecurity" + }, + { + "$ref": "#/definitions/apiKeySecurity" + }, + { + "$ref": "#/definitions/oauth2ImplicitSecurity" + }, + { + "$ref": "#/definitions/oauth2PasswordSecurity" + }, + { + "$ref": "#/definitions/oauth2ApplicationSecurity" + }, + { + "$ref": "#/definitions/oauth2AccessCodeSecurity" + } + ] + } + }, + "basicAuthenticationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "basic" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "apiKeySecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "name", + "in" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "apiKey" + ] + }, + "name": { + "type": "string" + }, + "in": { + "type": "string", + "enum": [ + "header", + "query" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ImplicitSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "implicit" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2PasswordSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "password" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ApplicationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "application" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2AccessCodeSecurity": { + "type": "object", + 
"additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "accessCode" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2Scopes": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "mediaTypeList": { + "type": "array", + "items": { + "$ref": "#/definitions/mimeType" + }, + "uniqueItems": true + }, + "parametersList": { + "type": "array", + "description": "The parameters needed to send a valid API call.", + "additionalItems": false, + "items": { + "oneOf": [ + { + "$ref": "#/definitions/parameter" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "uniqueItems": true + }, + "schemesList": { + "type": "array", + "description": "The transfer protocol of the API.", + "items": { + "type": "string", + "enum": [ + "http", + "https", + "ws", + "wss" + ] + }, + "uniqueItems": true + }, + "collectionFormat": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes" + ], + "default": "csv" + }, + "collectionFormatWithMulti": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes", + "multi" + ], + "default": "csv" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "enum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" + }, + "jsonReference": { + "type": "object", + "required": [ + "$ref" + ], + "additionalProperties": false, + "properties": { + "$ref": { + "type": "string" + } + } + } + } +} diff --git a/vendor/github.com/go-openapi/spec/spec.go b/vendor/github.com/go-openapi/spec/spec.go index 7d38b6e62..876aa1275 100644 --- a/vendor/github.com/go-openapi/spec/spec.go +++ b/vendor/github.com/go-openapi/spec/spec.go @@ -26,7 +26,7 @@ const ( // SwaggerSchemaURL the url for the swagger 2.0 schema to validate specs SwaggerSchemaURL = 
"http://swagger.io/v2/schema.json#" - // JSONSchemaURL the url for the json schema schema + // JSONSchemaURL the url for the json schema JSONSchemaURL = "http://json-schema.org/draft-04/schema#" ) @@ -41,7 +41,7 @@ func MustLoadJSONSchemaDraft04() *Schema { // JSONSchemaDraft04 loads the json schema document for json shema draft04 func JSONSchemaDraft04() (*Schema, error) { - b, err := Asset("jsonschema-draft-04.json") + b, err := jsonschemaDraft04JSONBytes() if err != nil { return nil, err } @@ -65,7 +65,7 @@ func MustLoadSwagger20Schema() *Schema { // Swagger20Schema loads the swagger 2.0 schema from the embedded assets func Swagger20Schema() (*Schema, error) { - b, err := Asset("v2/schema.json") + b, err := v2SchemaJSONBytes() if err != nil { return nil, err } diff --git a/vendor/github.com/go-openapi/spec/swagger.go b/vendor/github.com/go-openapi/spec/swagger.go index 44722ffd5..1590fd175 100644 --- a/vendor/github.com/go-openapi/spec/swagger.go +++ b/vendor/github.com/go-openapi/spec/swagger.go @@ -253,7 +253,7 @@ func (s SchemaOrBool) MarshalJSON() ([]byte, error) { // UnmarshalJSON converts this bool or schema object from a JSON structure func (s *SchemaOrBool) UnmarshalJSON(data []byte) error { var nw SchemaOrBool - if len(data) >= 4 { + if len(data) > 0 { if data[0] == '{' { var sch Schema if err := json.Unmarshal(data, &sch); err != nil { @@ -261,7 +261,7 @@ func (s *SchemaOrBool) UnmarshalJSON(data []byte) error { } nw.Schema = &sch } - nw.Allows = !(data[0] == 'f' && data[1] == 'a' && data[2] == 'l' && data[3] == 's' && data[4] == 'e') + nw.Allows = !bytes.Equal(data, []byte("false")) } *s = nw return nil diff --git a/vendor/github.com/go-openapi/spec/url_go18.go b/vendor/github.com/go-openapi/spec/url_go18.go deleted file mode 100644 index 60b785153..000000000 --- a/vendor/github.com/go-openapi/spec/url_go18.go +++ /dev/null @@ -1,8 +0,0 @@ -//go:build !go1.19 -// +build !go1.19 - -package spec - -import "net/url" - -var parseURL = url.Parse diff --git a/vendor/github.com/go-openapi/spec/url_go19.go b/vendor/github.com/go-openapi/spec/url_go19.go index 392e3e639..5bdfe40bc 100644 --- a/vendor/github.com/go-openapi/spec/url_go19.go +++ b/vendor/github.com/go-openapi/spec/url_go19.go @@ -1,6 +1,3 @@ -//go:build go1.19 -// +build go1.19 - package spec import "net/url" diff --git a/vendor/github.com/go-openapi/strfmt/.golangci.yml b/vendor/github.com/go-openapi/strfmt/.golangci.yml index be4899cb1..22f8d21cc 100644 --- a/vendor/github.com/go-openapi/strfmt/.golangci.yml +++ b/vendor/github.com/go-openapi/strfmt/.golangci.yml @@ -4,56 +4,58 @@ linters-settings: golint: min-confidence: 0 gocyclo: - min-complexity: 31 + min-complexity: 45 maligned: suggest-new: true dupl: - threshold: 100 + threshold: 200 goconst: min-len: 2 - min-occurrences: 4 + min-occurrences: 3 linters: - enable: - - revive - - goimports - - gosec + enable-all: true + disable: + - maligned - unparam - - unconvert - - predeclared - - prealloc - - misspell - - # disable: - # - maligned - # - lll - # - gochecknoinits - # - gochecknoglobals - # - godox - # - gocognit - # - whitespace - # - wsl - # - funlen - # - wrapcheck - # - testpackage - # - nlreturn - # - gofumpt - # - goerr113 - # - gci - # - gomnd - # - godot - # - exhaustivestruct - # - paralleltest - # - varnamelen - # - ireturn - # - exhaustruct - # #- thelper - -issues: - exclude-rules: - - path: bson.go - text: "should be .*ObjectID" - linters: - - golint - - stylecheck - + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + 
- whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/strfmt/README.md b/vendor/github.com/go-openapi/strfmt/README.md index 0cf89d776..f6b39c6c5 100644 --- a/vendor/github.com/go-openapi/strfmt/README.md +++ b/vendor/github.com/go-openapi/strfmt/README.md @@ -1,8 +1,7 @@ -# Strfmt [![Build Status](https://travis-ci.org/go-openapi/strfmt.svg?branch=master)](https://travis-ci.org/go-openapi/strfmt) [![codecov](https://codecov.io/gh/go-openapi/strfmt/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/strfmt) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) - +# Strfmt [![Build Status](https://github.com/go-openapi/strfmt/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/strfmt/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/strfmt/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/strfmt) +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) [![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/strfmt/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/strfmt?status.svg)](http://godoc.org/github.com/go-openapi/strfmt) -[![GolangCI](https://golangci.com/badges/github.com/go-openapi/strfmt.svg)](https://golangci.com) [![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/strfmt)](https://goreportcard.com/report/github.com/go-openapi/strfmt) This package exposes a registry of data types to support string formats in the go-openapi toolkit. diff --git a/vendor/github.com/go-openapi/strfmt/bson.go b/vendor/github.com/go-openapi/strfmt/bson.go index a8a3604a2..cfa9a526f 100644 --- a/vendor/github.com/go-openapi/strfmt/bson.go +++ b/vendor/github.com/go-openapi/strfmt/bson.go @@ -39,10 +39,10 @@ func IsBSONObjectID(str string) bool { // ObjectId represents a BSON object ID (alias to go.mongodb.org/mongo-driver/bson/primitive.ObjectID) // // swagger:strfmt bsonobjectid -type ObjectId bsonprim.ObjectID //nolint:revive +type ObjectId bsonprim.ObjectID //nolint:revive,stylecheck // NewObjectId creates a ObjectId from a Hex String -func NewObjectId(hex string) ObjectId { //nolint:revive +func NewObjectId(hex string) ObjectId { //nolint:revive,stylecheck oid, err := bsonprim.ObjectIDFromHex(hex) if err != nil { panic(err) @@ -135,7 +135,7 @@ func (id *ObjectId) UnmarshalBSON(data []byte) error { // BSON document if the error is nil. 
func (id ObjectId) MarshalBSONValue() (bsontype.Type, []byte, error) { oid := bsonprim.ObjectID(id) - return bsontype.ObjectID, oid[:], nil + return bson.TypeObjectID, oid[:], nil } // UnmarshalBSONValue is an interface implemented by types that can unmarshal a diff --git a/vendor/github.com/go-openapi/strfmt/default.go b/vendor/github.com/go-openapi/strfmt/default.go index a89a4de3f..281371406 100644 --- a/vendor/github.com/go-openapi/strfmt/default.go +++ b/vendor/github.com/go-openapi/strfmt/default.go @@ -25,6 +25,7 @@ "strings" "github.com/asaskevich/govalidator" + "github.com/google/uuid" "go.mongodb.org/mongo-driver/bson" ) @@ -57,24 +58,35 @@ // - long top-level domain names (e.g. example.london) are permitted // - symbol unicode points are permitted (e.g. emoji) (not for top-level domain) HostnamePattern = `^([a-zA-Z0-9\p{S}\p{L}]((-?[a-zA-Z0-9\p{S}\p{L}]{0,62})?)|([a-zA-Z0-9\p{S}\p{L}](([a-zA-Z0-9-\p{S}\p{L}]{0,61}[a-zA-Z0-9\p{S}\p{L}])?)(\.)){1,}([a-zA-Z\p{L}]){2,63})$` - // UUIDPattern Regex for UUID that allows uppercase - UUIDPattern = `(?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?[0-9a-f]{4}-?[0-9a-f]{4}-?[0-9a-f]{12}$` - // UUID3Pattern Regex for UUID3 that allows uppercase - UUID3Pattern = `(?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?3[0-9a-f]{3}-?[0-9a-f]{4}-?[0-9a-f]{12}$` - // UUID4Pattern Regex for UUID4 that allows uppercase - UUID4Pattern = `(?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?4[0-9a-f]{3}-?[89ab][0-9a-f]{3}-?[0-9a-f]{12}$` - // UUID5Pattern Regex for UUID5 that allows uppercase - UUID5Pattern = `(?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?5[0-9a-f]{3}-?[89ab][0-9a-f]{3}-?[0-9a-f]{12}$` + // json null type jsonNull = "null" ) +const ( + // UUIDPattern Regex for UUID that allows uppercase + // + // Deprecated: strfmt no longer uses regular expressions to validate UUIDs. + UUIDPattern = `(?i)(^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$)|(^[0-9a-f]{32}$)` + + // UUID3Pattern Regex for UUID3 that allows uppercase + // + // Deprecated: strfmt no longer uses regular expressions to validate UUIDs. + UUID3Pattern = `(?i)(^[0-9a-f]{8}-[0-9a-f]{4}-3[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$)|(^[0-9a-f]{12}3[0-9a-f]{3}?[0-9a-f]{16}$)` + + // UUID4Pattern Regex for UUID4 that allows uppercase + // + // Deprecated: strfmt no longer uses regular expressions to validate UUIDs. + UUID4Pattern = `(?i)(^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$)|(^[0-9a-f]{12}4[0-9a-f]{3}[89ab][0-9a-f]{15}$)` + + // UUID5Pattern Regex for UUID5 that allows uppercase + // + // Deprecated: strfmt no longer uses regular expressions to validate UUIDs. 
+ UUID5Pattern = `(?i)(^[0-9a-f]{8}-[0-9a-f]{4}-5[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$)|(^[0-9a-f]{12}5[0-9a-f]{3}[89ab][0-9a-f]{15}$)` +) + var ( rxHostname = regexp.MustCompile(HostnamePattern) - rxUUID = regexp.MustCompile(UUIDPattern) - rxUUID3 = regexp.MustCompile(UUID3Pattern) - rxUUID4 = regexp.MustCompile(UUID4Pattern) - rxUUID5 = regexp.MustCompile(UUID5Pattern) ) // IsHostname returns true when the string is a valid hostname @@ -99,24 +111,28 @@ func IsHostname(str string) bool { return valid } -// IsUUID returns true is the string matches a UUID, upper case is allowed +// IsUUID returns true is the string matches a UUID (in any version, including v6 and v7), upper case is allowed func IsUUID(str string) bool { - return rxUUID.MatchString(str) + _, err := uuid.Parse(str) + return err == nil } -// IsUUID3 returns true is the string matches a UUID, upper case is allowed +// IsUUID3 returns true is the string matches a UUID v3, upper case is allowed func IsUUID3(str string) bool { - return rxUUID3.MatchString(str) + id, err := uuid.Parse(str) + return err == nil && id.Version() == uuid.Version(3) } -// IsUUID4 returns true is the string matches a UUID, upper case is allowed +// IsUUID4 returns true is the string matches a UUID v4, upper case is allowed func IsUUID4(str string) bool { - return rxUUID4.MatchString(str) + id, err := uuid.Parse(str) + return err == nil && id.Version() == uuid.Version(4) } -// IsUUID5 returns true is the string matches a UUID, upper case is allowed +// IsUUID5 returns true is the string matches a UUID v5, upper case is allowed func IsUUID5(str string) bool { - return rxUUID5.MatchString(str) + id, err := uuid.Parse(str) + return err == nil && id.Version() == uuid.Version(5) } // IsEmail validates an email address. diff --git a/vendor/github.com/go-openapi/strfmt/format.go b/vendor/github.com/go-openapi/strfmt/format.go index ad3b3c355..888e107c3 100644 --- a/vendor/github.com/go-openapi/strfmt/format.go +++ b/vendor/github.com/go-openapi/strfmt/format.go @@ -16,6 +16,7 @@ import ( "encoding" + stderrors "errors" "fmt" "reflect" "strings" @@ -94,7 +95,7 @@ func NewSeededFormats(seeds []knownFormat, normalizer NameNormalizer) Registry { } // MapStructureHookFunc is a decode hook function for mapstructure -func (f *defaultFormats) MapStructureHookFunc() mapstructure.DecodeHookFunc { //nolint:gocyclo,cyclop +func (f *defaultFormats) MapStructureHookFunc() mapstructure.DecodeHookFunc { return func(from reflect.Type, to reflect.Type, obj interface{}) (interface{}, error) { if from.Kind() != reflect.String { return obj, nil @@ -117,7 +118,7 @@ func (f *defaultFormats) MapStructureHookFunc() mapstructure.DecodeHookFunc { // case "datetime": input := data if len(input) == 0 { - return nil, fmt.Errorf("empty string is an invalid datetime format") + return nil, stderrors.New("empty string is an invalid datetime format") } return ParseDateTime(input) case "duration": diff --git a/vendor/github.com/go-openapi/strfmt/time.go b/vendor/github.com/go-openapi/strfmt/time.go index 9bef4c3b3..f08ba4da5 100644 --- a/vendor/github.com/go-openapi/strfmt/time.go +++ b/vendor/github.com/go-openapi/strfmt/time.go @@ -76,6 +76,8 @@ func IsDateTime(str string) bool { ISO8601TimeWithReducedPrecisionLocaltime = "2006-01-02T15:04" // ISO8601TimeUniversalSortableDateTimePattern represents a ISO8601 universal sortable date time pattern. 
ISO8601TimeUniversalSortableDateTimePattern = "2006-01-02 15:04:05" + // short form of ISO8601TimeUniversalSortableDateTimePattern + ISO8601TimeUniversalSortableDateTimePatternShortForm = "2006-01-02" // DateTimePattern pattern to match for the date-time format from http://tools.ietf.org/html/rfc3339#section-5.6 DateTimePattern = `^([0-9]{2}):([0-9]{2}):([0-9]{2})(.[0-9]+)?(z|([+-][0-9]{2}:[0-9]{2}))$` ) @@ -84,7 +86,7 @@ func IsDateTime(str string) bool { rxDateTime = regexp.MustCompile(DateTimePattern) // DateTimeFormats is the collection of formats used by ParseDateTime() - DateTimeFormats = []string{RFC3339Micro, RFC3339MicroNoColon, RFC3339Millis, RFC3339MillisNoColon, time.RFC3339, time.RFC3339Nano, ISO8601LocalTime, ISO8601TimeWithReducedPrecision, ISO8601TimeWithReducedPrecisionLocaltime, ISO8601TimeUniversalSortableDateTimePattern} + DateTimeFormats = []string{RFC3339Micro, RFC3339MicroNoColon, RFC3339Millis, RFC3339MillisNoColon, time.RFC3339, time.RFC3339Nano, ISO8601LocalTime, ISO8601TimeWithReducedPrecision, ISO8601TimeWithReducedPrecisionLocaltime, ISO8601TimeUniversalSortableDateTimePattern, ISO8601TimeUniversalSortableDateTimePatternShortForm} // MarshalFormat sets the time resolution format used for marshaling time (set to milliseconds) MarshalFormat = RFC3339Millis @@ -245,7 +247,7 @@ func (t DateTime) MarshalBSONValue() (bsontype.Type, []byte, error) { buf := make([]byte, 8) binary.LittleEndian.PutUint64(buf, uint64(i64)) - return bsontype.DateTime, buf, nil + return bson.TypeDateTime, buf, nil } // UnmarshalBSONValue is an interface implemented by types that can unmarshal a @@ -253,7 +255,7 @@ func (t DateTime) MarshalBSONValue() (bsontype.Type, []byte, error) { // assumed to be valid. UnmarshalBSONValue must copy the BSON value bytes if it // wishes to retain the data after returning. 
func (t *DateTime) UnmarshalBSONValue(tpe bsontype.Type, data []byte) error { - if tpe == bsontype.Null { + if tpe == bson.TypeNull { *t = DateTime{} return nil } diff --git a/vendor/github.com/go-openapi/swag/.gitignore b/vendor/github.com/go-openapi/swag/.gitignore index d69b53acc..c4b1b64f0 100644 --- a/vendor/github.com/go-openapi/swag/.gitignore +++ b/vendor/github.com/go-openapi/swag/.gitignore @@ -2,3 +2,4 @@ secrets.yml vendor Godeps .idea +*.out diff --git a/vendor/github.com/go-openapi/swag/.golangci.yml b/vendor/github.com/go-openapi/swag/.golangci.yml index bf503e400..80e2be004 100644 --- a/vendor/github.com/go-openapi/swag/.golangci.yml +++ b/vendor/github.com/go-openapi/swag/.golangci.yml @@ -4,14 +4,14 @@ linters-settings: golint: min-confidence: 0 gocyclo: - min-complexity: 25 + min-complexity: 45 maligned: suggest-new: true dupl: - threshold: 100 + threshold: 200 goconst: min-len: 3 - min-occurrences: 2 + min-occurrences: 3 linters: enable-all: true @@ -20,35 +20,41 @@ linters: - lll - gochecknoinits - gochecknoglobals - - nlreturn - - testpackage + - funlen + - godox + - gocognit + - whitespace + - wsl - wrapcheck + - testpackage + - nlreturn - gomnd - - exhaustive - exhaustivestruct - goerr113 - - wsl - - whitespace - - gofumpt - - godot + - errorlint - nestif - - godox - - funlen - - gci - - gocognit + - godot + - gofumpt - paralleltest + - tparallel - thelper - ifshort - - gomoddirectives - - cyclop - - forcetypeassert - - ireturn - - tagliatelle - - varnamelen - - goimports - - tenv - - golint - exhaustruct - - nilnil + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint - nosnakecase diff --git a/vendor/github.com/go-openapi/swag/BENCHMARK.md b/vendor/github.com/go-openapi/swag/BENCHMARK.md new file mode 100644 index 000000000..e7f28ed6b --- /dev/null +++ b/vendor/github.com/go-openapi/swag/BENCHMARK.md @@ -0,0 +1,52 @@ +# Benchmarks + +## Name mangling utilities + +```bash +go test -bench XXX -run XXX -benchtime 30s +``` + +### Benchmarks at b3e7a5386f996177e4808f11acb2aa93a0f660df + +``` +goos: linux +goarch: amd64 +pkg: github.com/go-openapi/swag +cpu: Intel(R) Core(TM) i5-6200U CPU @ 2.30GHz +BenchmarkToXXXName/ToGoName-4 862623 44101 ns/op 10450 B/op 732 allocs/op +BenchmarkToXXXName/ToVarName-4 853656 40728 ns/op 10468 B/op 734 allocs/op +BenchmarkToXXXName/ToFileName-4 1268312 27813 ns/op 9785 B/op 617 allocs/op +BenchmarkToXXXName/ToCommandName-4 1276322 27903 ns/op 9785 B/op 617 allocs/op +BenchmarkToXXXName/ToHumanNameLower-4 895334 40354 ns/op 10472 B/op 731 allocs/op +BenchmarkToXXXName/ToHumanNameTitle-4 882441 40678 ns/op 10566 B/op 749 allocs/op +``` + +### Benchmarks after PR #79 + +~ x10 performance improvement and ~ /100 memory allocations. 
+ +``` +goos: linux +goarch: amd64 +pkg: github.com/go-openapi/swag +cpu: Intel(R) Core(TM) i5-6200U CPU @ 2.30GHz +BenchmarkToXXXName/ToGoName-4 9595830 3991 ns/op 42 B/op 5 allocs/op +BenchmarkToXXXName/ToVarName-4 9194276 3984 ns/op 62 B/op 7 allocs/op +BenchmarkToXXXName/ToFileName-4 17002711 2123 ns/op 147 B/op 7 allocs/op +BenchmarkToXXXName/ToCommandName-4 16772926 2111 ns/op 147 B/op 7 allocs/op +BenchmarkToXXXName/ToHumanNameLower-4 9788331 3749 ns/op 92 B/op 6 allocs/op +BenchmarkToXXXName/ToHumanNameTitle-4 9188260 3941 ns/op 104 B/op 6 allocs/op +``` + +``` +goos: linux +goarch: amd64 +pkg: github.com/go-openapi/swag +cpu: AMD Ryzen 7 5800X 8-Core Processor +BenchmarkToXXXName/ToGoName-16 18527378 1972 ns/op 42 B/op 5 allocs/op +BenchmarkToXXXName/ToVarName-16 15552692 2093 ns/op 62 B/op 7 allocs/op +BenchmarkToXXXName/ToFileName-16 32161176 1117 ns/op 147 B/op 7 allocs/op +BenchmarkToXXXName/ToCommandName-16 32256634 1137 ns/op 147 B/op 7 allocs/op +BenchmarkToXXXName/ToHumanNameLower-16 18599661 1946 ns/op 92 B/op 6 allocs/op +BenchmarkToXXXName/ToHumanNameTitle-16 17581353 2054 ns/op 105 B/op 6 allocs/op +``` diff --git a/vendor/github.com/go-openapi/swag/README.md b/vendor/github.com/go-openapi/swag/README.md index 217f6fa50..a72922299 100644 --- a/vendor/github.com/go-openapi/swag/README.md +++ b/vendor/github.com/go-openapi/swag/README.md @@ -1,7 +1,8 @@ -# Swag [![Build Status](https://travis-ci.org/go-openapi/swag.svg?branch=master)](https://travis-ci.org/go-openapi/swag) [![codecov](https://codecov.io/gh/go-openapi/swag/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/swag) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +# Swag [![Build Status](https://github.com/go-openapi/swag/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/swag/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/swag/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/swag) +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) [![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/swag/master/LICENSE) -[![GoDoc](https://godoc.org/github.com/go-openapi/swag?status.svg)](http://godoc.org/github.com/go-openapi/swag) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/swag.svg)](https://pkg.go.dev/github.com/go-openapi/swag) [![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/swag)](https://goreportcard.com/report/github.com/go-openapi/swag) Contains a bunch of helper functions for go-openapi and go-swagger projects. @@ -18,4 +19,5 @@ You may also use it standalone for your projects. This repo has only few dependencies outside of the standard library: -* YAML utilities depend on gopkg.in/yaml.v2 +* YAML utilities depend on `gopkg.in/yaml.v3` +* `github.com/mailru/easyjson v0.7.7` diff --git a/vendor/github.com/go-openapi/swag/initialism_index.go b/vendor/github.com/go-openapi/swag/initialism_index.go new file mode 100644 index 000000000..20a359bb6 --- /dev/null +++ b/vendor/github.com/go-openapi/swag/initialism_index.go @@ -0,0 +1,202 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package swag + +import ( + "sort" + "strings" + "sync" +) + +var ( + // commonInitialisms are common acronyms that are kept as whole uppercased words. + commonInitialisms *indexOfInitialisms + + // initialisms is a slice of sorted initialisms + initialisms []string + + // a copy of initialisms pre-baked as []rune + initialismsRunes [][]rune + initialismsUpperCased [][]rune + + isInitialism func(string) bool + + maxAllocMatches int +) + +func init() { + // Taken from https://github.com/golang/lint/blob/3390df4df2787994aea98de825b964ac7944b817/lint.go#L732-L769 + configuredInitialisms := map[string]bool{ + "ACL": true, + "API": true, + "ASCII": true, + "CPU": true, + "CSS": true, + "DNS": true, + "EOF": true, + "GUID": true, + "HTML": true, + "HTTPS": true, + "HTTP": true, + "ID": true, + "IP": true, + "IPv4": true, + "IPv6": true, + "JSON": true, + "LHS": true, + "OAI": true, + "QPS": true, + "RAM": true, + "RHS": true, + "RPC": true, + "SLA": true, + "SMTP": true, + "SQL": true, + "SSH": true, + "TCP": true, + "TLS": true, + "TTL": true, + "UDP": true, + "UI": true, + "UID": true, + "UUID": true, + "URI": true, + "URL": true, + "UTF8": true, + "VM": true, + "XML": true, + "XMPP": true, + "XSRF": true, + "XSS": true, + } + + // a thread-safe index of initialisms + commonInitialisms = newIndexOfInitialisms().load(configuredInitialisms) + initialisms = commonInitialisms.sorted() + initialismsRunes = asRunes(initialisms) + initialismsUpperCased = asUpperCased(initialisms) + maxAllocMatches = maxAllocHeuristic(initialismsRunes) + + // a test function + isInitialism = commonInitialisms.isInitialism +} + +func asRunes(in []string) [][]rune { + out := make([][]rune, len(in)) + for i, initialism := range in { + out[i] = []rune(initialism) + } + + return out +} + +func asUpperCased(in []string) [][]rune { + out := make([][]rune, len(in)) + + for i, initialism := range in { + out[i] = []rune(upper(trim(initialism))) + } + + return out +} + +func maxAllocHeuristic(in [][]rune) int { + heuristic := make(map[rune]int) + for _, initialism := range in { + heuristic[initialism[0]]++ + } + + var maxAlloc int + for _, val := range heuristic { + if val > maxAlloc { + maxAlloc = val + } + } + + return maxAlloc +} + +// AddInitialisms add additional initialisms +func AddInitialisms(words ...string) { + for _, word := range words { + // commonInitialisms[upper(word)] = true + commonInitialisms.add(upper(word)) + } + // sort again + initialisms = commonInitialisms.sorted() + initialismsRunes = asRunes(initialisms) + initialismsUpperCased = asUpperCased(initialisms) +} + +// indexOfInitialisms is a thread-safe implementation of the sorted index of initialisms. +// Since go1.9, this may be implemented with sync.Map. 
+type indexOfInitialisms struct { + sortMutex *sync.Mutex + index *sync.Map +} + +func newIndexOfInitialisms() *indexOfInitialisms { + return &indexOfInitialisms{ + sortMutex: new(sync.Mutex), + index: new(sync.Map), + } +} + +func (m *indexOfInitialisms) load(initial map[string]bool) *indexOfInitialisms { + m.sortMutex.Lock() + defer m.sortMutex.Unlock() + for k, v := range initial { + m.index.Store(k, v) + } + return m +} + +func (m *indexOfInitialisms) isInitialism(key string) bool { + _, ok := m.index.Load(key) + return ok +} + +func (m *indexOfInitialisms) add(key string) *indexOfInitialisms { + m.index.Store(key, true) + return m +} + +func (m *indexOfInitialisms) sorted() (result []string) { + m.sortMutex.Lock() + defer m.sortMutex.Unlock() + m.index.Range(func(key, _ interface{}) bool { + k := key.(string) + result = append(result, k) + return true + }) + sort.Sort(sort.Reverse(byInitialism(result))) + return +} + +type byInitialism []string + +func (s byInitialism) Len() int { + return len(s) +} +func (s byInitialism) Swap(i, j int) { + s[i], s[j] = s[j], s[i] +} +func (s byInitialism) Less(i, j int) bool { + if len(s[i]) != len(s[j]) { + return len(s[i]) < len(s[j]) + } + + return strings.Compare(s[i], s[j]) > 0 +} diff --git a/vendor/github.com/go-openapi/swag/loading.go b/vendor/github.com/go-openapi/swag/loading.go index 00038c377..783442fdd 100644 --- a/vendor/github.com/go-openapi/swag/loading.go +++ b/vendor/github.com/go-openapi/swag/loading.go @@ -21,6 +21,7 @@ "net/http" "net/url" "os" + "path" "path/filepath" "runtime" "strings" @@ -40,43 +41,97 @@ var LoadHTTPCustomHeaders = map[string]string{} // LoadFromFileOrHTTP loads the bytes from a file or a remote http server based on the path passed in -func LoadFromFileOrHTTP(path string) ([]byte, error) { - return LoadStrategy(path, os.ReadFile, loadHTTPBytes(LoadHTTPTimeout))(path) +func LoadFromFileOrHTTP(pth string) ([]byte, error) { + return LoadStrategy(pth, os.ReadFile, loadHTTPBytes(LoadHTTPTimeout))(pth) } // LoadFromFileOrHTTPWithTimeout loads the bytes from a file or a remote http server based on the path passed in // timeout arg allows for per request overriding of the request timeout -func LoadFromFileOrHTTPWithTimeout(path string, timeout time.Duration) ([]byte, error) { - return LoadStrategy(path, os.ReadFile, loadHTTPBytes(timeout))(path) +func LoadFromFileOrHTTPWithTimeout(pth string, timeout time.Duration) ([]byte, error) { + return LoadStrategy(pth, os.ReadFile, loadHTTPBytes(timeout))(pth) } -// LoadStrategy returns a loader function for a given path or uri -func LoadStrategy(path string, local, remote func(string) ([]byte, error)) func(string) ([]byte, error) { - if strings.HasPrefix(path, "http") { +// LoadStrategy returns a loader function for a given path or URI. +// +// The load strategy returns the remote load for any path starting with `http`. +// So this works for any URI with a scheme `http` or `https`. +// +// The fallback strategy is to call the local loader. +// +// The local loader takes a local file system path (absolute or relative) as argument, +// or alternatively a `file://...` URI, **without host** (see also below for windows). +// +// There are a few liberalities, initially intended to be tolerant regarding the URI syntax, +// especially on windows. +// +// Before the local loader is called, the given path is transformed: +// - percent-encoded characters are unescaped +// - simple paths (e.g. 
`./folder/file`) are passed as-is +// - on windows, occurrences of `/` are replaced by `\`, so providing a relative path such a `folder/file` works too. +// +// For paths provided as URIs with the "file" scheme, please note that: +// - `file://` is simply stripped. +// This means that the host part of the URI is not parsed at all. +// For example, `file:///folder/file" becomes "/folder/file`, +// but `file://localhost/folder/file` becomes `localhost/folder/file` on unix systems. +// Similarly, `file://./folder/file` yields `./folder/file`. +// - on windows, `file://...` can take a host so as to specify an UNC share location. +// +// Reminder about windows-specifics: +// - `file://host/folder/file` becomes an UNC path like `\\host\folder\file` (no port specification is supported) +// - `file:///c:/folder/file` becomes `C:\folder\file` +// - `file://c:/folder/file` is tolerated (without leading `/`) and becomes `c:\folder\file` +func LoadStrategy(pth string, local, remote func(string) ([]byte, error)) func(string) ([]byte, error) { + if strings.HasPrefix(pth, "http") { return remote } - return func(pth string) ([]byte, error) { - upth, err := pathUnescape(pth) + + return func(p string) ([]byte, error) { + upth, err := url.PathUnescape(p) if err != nil { return nil, err } - if strings.HasPrefix(pth, `file://`) { - if runtime.GOOS == "windows" { - // support for canonical file URIs on windows. - // Zero tolerance here for dodgy URIs. - u, _ := url.Parse(upth) - if u.Host != "" { - // assume UNC name (volume share) - // file://host/share/folder\... ==> \\host\share\path\folder - // NOTE: UNC port not yet supported - upth = strings.Join([]string{`\`, u.Host, u.Path}, `\`) - } else { - // file:///c:/folder/... ==> just remove the leading slash - upth = strings.TrimPrefix(upth, `file:///`) - } - } else { - upth = strings.TrimPrefix(upth, `file://`) + if !strings.HasPrefix(p, `file://`) { + // regular file path provided: just normalize slashes + return local(filepath.FromSlash(upth)) + } + + if runtime.GOOS != "windows" { + // crude processing: this leaves full URIs with a host with a (mostly) unexpected result + upth = strings.TrimPrefix(upth, `file://`) + + return local(filepath.FromSlash(upth)) + } + + // windows-only pre-processing of file://... URIs + + // support for canonical file URIs on windows. + u, err := url.Parse(filepath.ToSlash(upth)) + if err != nil { + return nil, err + } + + if u.Host != "" { + // assume UNC name (volume share) + // NOTE: UNC port not yet supported + + // when the "host" segment is a drive letter: + // file://C:/folder/... => C:\folder + upth = path.Clean(strings.Join([]string{u.Host, u.Path}, `/`)) + if !strings.HasSuffix(u.Host, ":") && u.Host[0] != '.' { + // tolerance: if we have a leading dot, this can't be a host + // file://host/share/folder\... ==> \\host\share\path\folder + upth = "//" + upth + } + } else { + // no host, let's figure out if this is a drive letter + upth = strings.TrimPrefix(upth, `file://`) + first, _, _ := strings.Cut(strings.TrimPrefix(u.Path, "/"), "/") + if strings.HasSuffix(first, ":") { + // drive letter in the first segment: + // file:///c:/folder/... 
==> strip the leading slash + upth = strings.TrimPrefix(upth, `/`) } } diff --git a/vendor/github.com/go-openapi/swag/name_lexem.go b/vendor/github.com/go-openapi/swag/name_lexem.go index aa7f6a9bb..8bb64ac32 100644 --- a/vendor/github.com/go-openapi/swag/name_lexem.go +++ b/vendor/github.com/go-openapi/swag/name_lexem.go @@ -14,74 +14,80 @@ package swag -import "unicode" +import ( + "unicode" + "unicode/utf8" +) type ( - nameLexem interface { - GetUnsafeGoName() string - GetOriginal() string - IsInitialism() bool - } + lexemKind uint8 - initialismNameLexem struct { + nameLexem struct { original string matchedInitialism string - } - - casualNameLexem struct { - original string + kind lexemKind } ) -func newInitialismNameLexem(original, matchedInitialism string) *initialismNameLexem { - return &initialismNameLexem{ +const ( + lexemKindCasualName lexemKind = iota + lexemKindInitialismName +) + +func newInitialismNameLexem(original, matchedInitialism string) nameLexem { + return nameLexem{ + kind: lexemKindInitialismName, original: original, matchedInitialism: matchedInitialism, } } -func newCasualNameLexem(original string) *casualNameLexem { - return &casualNameLexem{ +func newCasualNameLexem(original string) nameLexem { + return nameLexem{ + kind: lexemKindCasualName, original: original, } } -func (l *initialismNameLexem) GetUnsafeGoName() string { - return l.matchedInitialism -} +func (l nameLexem) GetUnsafeGoName() string { + if l.kind == lexemKindInitialismName { + return l.matchedInitialism + } + + var ( + first rune + rest string + ) -func (l *casualNameLexem) GetUnsafeGoName() string { - var first rune - var rest string for i, orig := range l.original { if i == 0 { first = orig continue } + if i > 0 { rest = l.original[i:] break } } + if len(l.original) > 1 { - return string(unicode.ToUpper(first)) + lower(rest) + b := poolOfBuffers.BorrowBuffer(utf8.UTFMax + len(rest)) + defer func() { + poolOfBuffers.RedeemBuffer(b) + }() + b.WriteRune(unicode.ToUpper(first)) + b.WriteString(lower(rest)) + return b.String() } return l.original } -func (l *initialismNameLexem) GetOriginal() string { +func (l nameLexem) GetOriginal() string { return l.original } -func (l *casualNameLexem) GetOriginal() string { - return l.original -} - -func (l *initialismNameLexem) IsInitialism() bool { - return true -} - -func (l *casualNameLexem) IsInitialism() bool { - return false +func (l nameLexem) IsInitialism() bool { + return l.kind == lexemKindInitialismName } diff --git a/vendor/github.com/go-openapi/swag/post_go18.go b/vendor/github.com/go-openapi/swag/post_go18.go deleted file mode 100644 index f5228b82c..000000000 --- a/vendor/github.com/go-openapi/swag/post_go18.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:build go1.8 -// +build go1.8 - -package swag - -import "net/url" - -func pathUnescape(path string) (string, error) { - return url.PathUnescape(path) -} diff --git a/vendor/github.com/go-openapi/swag/post_go19.go b/vendor/github.com/go-openapi/swag/post_go19.go deleted file mode 100644 index 7c7da9c08..000000000 --- a/vendor/github.com/go-openapi/swag/post_go19.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build go1.9 -// +build go1.9 - -package swag - -import ( - "sort" - "sync" -) - -// indexOfInitialisms is a thread-safe implementation of the sorted index of initialisms. -// Since go1.9, this may be implemented with sync.Map. -type indexOfInitialisms struct { - sortMutex *sync.Mutex - index *sync.Map -} - -func newIndexOfInitialisms() *indexOfInitialisms { - return &indexOfInitialisms{ - sortMutex: new(sync.Mutex), - index: new(sync.Map), - } -} - -func (m *indexOfInitialisms) load(initial map[string]bool) *indexOfInitialisms { - m.sortMutex.Lock() - defer m.sortMutex.Unlock() - for k, v := range initial { - m.index.Store(k, v) - } - return m -} - -func (m *indexOfInitialisms) isInitialism(key string) bool { - _, ok := m.index.Load(key) - return ok -} - -func (m *indexOfInitialisms) add(key string) *indexOfInitialisms { - m.index.Store(key, true) - return m -} - -func (m *indexOfInitialisms) sorted() (result []string) { - m.sortMutex.Lock() - defer m.sortMutex.Unlock() - m.index.Range(func(key, value interface{}) bool { - k := key.(string) - result = append(result, k) - return true - }) - sort.Sort(sort.Reverse(byInitialism(result))) - return -} diff --git a/vendor/github.com/go-openapi/swag/pre_go18.go b/vendor/github.com/go-openapi/swag/pre_go18.go deleted file mode 100644 index 2757d9b95..000000000 --- a/vendor/github.com/go-openapi/swag/pre_go18.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:build !go1.8 -// +build !go1.8 - -package swag - -import "net/url" - -func pathUnescape(path string) (string, error) { - return url.QueryUnescape(path) -} diff --git a/vendor/github.com/go-openapi/swag/pre_go19.go b/vendor/github.com/go-openapi/swag/pre_go19.go deleted file mode 100644 index 0565db377..000000000 --- a/vendor/github.com/go-openapi/swag/pre_go19.go +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build !go1.9 -// +build !go1.9 - -package swag - -import ( - "sort" - "sync" -) - -// indexOfInitialisms is a thread-safe implementation of the sorted index of initialisms. -// Before go1.9, this may be implemented with a mutex on the map. -type indexOfInitialisms struct { - getMutex *sync.Mutex - index map[string]bool -} - -func newIndexOfInitialisms() *indexOfInitialisms { - return &indexOfInitialisms{ - getMutex: new(sync.Mutex), - index: make(map[string]bool, 50), - } -} - -func (m *indexOfInitialisms) load(initial map[string]bool) *indexOfInitialisms { - m.getMutex.Lock() - defer m.getMutex.Unlock() - for k, v := range initial { - m.index[k] = v - } - return m -} - -func (m *indexOfInitialisms) isInitialism(key string) bool { - m.getMutex.Lock() - defer m.getMutex.Unlock() - _, ok := m.index[key] - return ok -} - -func (m *indexOfInitialisms) add(key string) *indexOfInitialisms { - m.getMutex.Lock() - defer m.getMutex.Unlock() - m.index[key] = true - return m -} - -func (m *indexOfInitialisms) sorted() (result []string) { - m.getMutex.Lock() - defer m.getMutex.Unlock() - for k := range m.index { - result = append(result, k) - } - sort.Sort(sort.Reverse(byInitialism(result))) - return -} diff --git a/vendor/github.com/go-openapi/swag/split.go b/vendor/github.com/go-openapi/swag/split.go index a1825fb7d..274727a86 100644 --- a/vendor/github.com/go-openapi/swag/split.go +++ b/vendor/github.com/go-openapi/swag/split.go @@ -15,124 +15,269 @@ package swag import ( + "bytes" + "sync" "unicode" + "unicode/utf8" ) -var nameReplaceTable = map[rune]string{ - '@': "At ", - '&': "And ", - '|': "Pipe ", - '$': "Dollar ", - '!': "Bang ", - '-': "", - '_': "", -} - type ( splitter struct { - postSplitInitialismCheck bool initialisms []string + initialismsRunes [][]rune + initialismsUpperCased [][]rune // initialisms cached in their trimmed, upper-cased version + postSplitInitialismCheck bool } - splitterOption func(*splitter) *splitter + splitterOption func(*splitter) + + initialismMatch struct { + body []rune + start, end int + complete bool + } + initialismMatches []initialismMatch ) -// split calls the splitter; splitter provides more control and post options -func split(str string) []string { - lexems := newSplitter().split(str) - result := make([]string, 0, len(lexems)) +type ( + // memory pools of temporary objects. + // + // These are used to recycle temporarily allocated objects + // and relieve the GC from undue pressure. 
- for _, lexem := range lexems { + matchesPool struct { + *sync.Pool + } + + buffersPool struct { + *sync.Pool + } + + lexemsPool struct { + *sync.Pool + } + + splittersPool struct { + *sync.Pool + } +) + +var ( + // poolOfMatches holds temporary slices for recycling during the initialism match process + poolOfMatches = matchesPool{ + Pool: &sync.Pool{ + New: func() any { + s := make(initialismMatches, 0, maxAllocMatches) + + return &s + }, + }, + } + + poolOfBuffers = buffersPool{ + Pool: &sync.Pool{ + New: func() any { + return new(bytes.Buffer) + }, + }, + } + + poolOfLexems = lexemsPool{ + Pool: &sync.Pool{ + New: func() any { + s := make([]nameLexem, 0, maxAllocMatches) + + return &s + }, + }, + } + + poolOfSplitters = splittersPool{ + Pool: &sync.Pool{ + New: func() any { + s := newSplitter() + + return &s + }, + }, + } +) + +// nameReplaceTable finds a word representation for special characters. +func nameReplaceTable(r rune) (string, bool) { + switch r { + case '@': + return "At ", true + case '&': + return "And ", true + case '|': + return "Pipe ", true + case '$': + return "Dollar ", true + case '!': + return "Bang ", true + case '-': + return "", true + case '_': + return "", true + default: + return "", false + } +} + +// split calls the splitter. +// +// Use newSplitter for more control and options +func split(str string) []string { + s := poolOfSplitters.BorrowSplitter() + lexems := s.split(str) + result := make([]string, 0, len(*lexems)) + + for _, lexem := range *lexems { result = append(result, lexem.GetOriginal()) } + poolOfLexems.RedeemLexems(lexems) + poolOfSplitters.RedeemSplitter(s) return result } -func (s *splitter) split(str string) []nameLexem { - return s.toNameLexems(str) -} - -func newSplitter(options ...splitterOption) *splitter { - splitter := &splitter{ +func newSplitter(options ...splitterOption) splitter { + s := splitter{ postSplitInitialismCheck: false, initialisms: initialisms, + initialismsRunes: initialismsRunes, + initialismsUpperCased: initialismsUpperCased, } for _, option := range options { - splitter = option(splitter) + option(&s) } - return splitter -} - -// withPostSplitInitialismCheck allows to catch initialisms after main split process -func withPostSplitInitialismCheck(s *splitter) *splitter { - s.postSplitInitialismCheck = true return s } -type ( - initialismMatch struct { - start, end int - body []rune - complete bool - } - initialismMatches []*initialismMatch -) +// withPostSplitInitialismCheck allows to catch initialisms after main split process +func withPostSplitInitialismCheck(s *splitter) { + s.postSplitInitialismCheck = true +} -func (s *splitter) toNameLexems(name string) []nameLexem { +func (p matchesPool) BorrowMatches() *initialismMatches { + s := p.Get().(*initialismMatches) + *s = (*s)[:0] // reset slice, keep allocated capacity + + return s +} + +func (p buffersPool) BorrowBuffer(size int) *bytes.Buffer { + s := p.Get().(*bytes.Buffer) + s.Reset() + + if s.Cap() < size { + s.Grow(size) + } + + return s +} + +func (p lexemsPool) BorrowLexems() *[]nameLexem { + s := p.Get().(*[]nameLexem) + *s = (*s)[:0] // reset slice, keep allocated capacity + + return s +} + +func (p splittersPool) BorrowSplitter(options ...splitterOption) *splitter { + s := p.Get().(*splitter) + s.postSplitInitialismCheck = false // reset options + for _, apply := range options { + apply(s) + } + + return s +} + +func (p matchesPool) RedeemMatches(s *initialismMatches) { + p.Put(s) +} + +func (p buffersPool) RedeemBuffer(s *bytes.Buffer) { + p.Put(s) +} + 
+func (p lexemsPool) RedeemLexems(s *[]nameLexem) { + p.Put(s) +} + +func (p splittersPool) RedeemSplitter(s *splitter) { + p.Put(s) +} + +func (m initialismMatch) isZero() bool { + return m.start == 0 && m.end == 0 +} + +func (s splitter) split(name string) *[]nameLexem { nameRunes := []rune(name) matches := s.gatherInitialismMatches(nameRunes) + if matches == nil { + return poolOfLexems.BorrowLexems() + } + return s.mapMatchesToNameLexems(nameRunes, matches) } -func (s *splitter) gatherInitialismMatches(nameRunes []rune) initialismMatches { - matches := make(initialismMatches, 0) +func (s splitter) gatherInitialismMatches(nameRunes []rune) *initialismMatches { + var matches *initialismMatches for currentRunePosition, currentRune := range nameRunes { - newMatches := make(initialismMatches, 0, len(matches)) + // recycle these allocations as we loop over runes + // with such recycling, only 2 slices should be allocated per call + // instead of o(n). + newMatches := poolOfMatches.BorrowMatches() // check current initialism matches - for _, match := range matches { - if keepCompleteMatch := match.complete; keepCompleteMatch { - newMatches = append(newMatches, match) - continue - } - - // drop failed match - currentMatchRune := match.body[currentRunePosition-match.start] - if !s.initialismRuneEqual(currentMatchRune, currentRune) { - continue - } - - // try to complete ongoing match - if currentRunePosition-match.start == len(match.body)-1 { - // we are close; the next step is to check the symbol ahead - // if it is a small letter, then it is not the end of match - // but beginning of the next word - - if currentRunePosition < len(nameRunes)-1 { - nextRune := nameRunes[currentRunePosition+1] - if newWord := unicode.IsLower(nextRune); newWord { - // oh ok, it was the start of a new word - continue - } + if matches != nil { // skip first iteration + for _, match := range *matches { + if keepCompleteMatch := match.complete; keepCompleteMatch { + *newMatches = append(*newMatches, match) + continue } - match.complete = true - match.end = currentRunePosition - } + // drop failed match + currentMatchRune := match.body[currentRunePosition-match.start] + if currentMatchRune != currentRune { + continue + } - newMatches = append(newMatches, match) + // try to complete ongoing match + if currentRunePosition-match.start == len(match.body)-1 { + // we are close; the next step is to check the symbol ahead + // if it is a small letter, then it is not the end of match + // but beginning of the next word + + if currentRunePosition < len(nameRunes)-1 { + nextRune := nameRunes[currentRunePosition+1] + if newWord := unicode.IsLower(nextRune); newWord { + // oh ok, it was the start of a new word + continue + } + } + + match.complete = true + match.end = currentRunePosition + } + + *newMatches = append(*newMatches, match) + } } // check for new initialism matches - for _, initialism := range s.initialisms { - initialismRunes := []rune(initialism) - if s.initialismRuneEqual(initialismRunes[0], currentRune) { - newMatches = append(newMatches, &initialismMatch{ + for i := range s.initialisms { + initialismRunes := s.initialismsRunes[i] + if initialismRunes[0] == currentRune { + *newMatches = append(*newMatches, initialismMatch{ start: currentRunePosition, body: initialismRunes, complete: false, @@ -140,24 +285,28 @@ func (s *splitter) gatherInitialismMatches(nameRunes []rune) initialismMatches { } } + if matches != nil { + poolOfMatches.RedeemMatches(matches) + } matches = newMatches } + // up to the caller to redeem 
this last slice return matches } -func (s *splitter) mapMatchesToNameLexems(nameRunes []rune, matches initialismMatches) []nameLexem { - nameLexems := make([]nameLexem, 0) +func (s splitter) mapMatchesToNameLexems(nameRunes []rune, matches *initialismMatches) *[]nameLexem { + nameLexems := poolOfLexems.BorrowLexems() - var lastAcceptedMatch *initialismMatch - for _, match := range matches { + var lastAcceptedMatch initialismMatch + for _, match := range *matches { if !match.complete { continue } - if firstMatch := lastAcceptedMatch == nil; firstMatch { - nameLexems = append(nameLexems, s.breakCasualString(nameRunes[:match.start])...) - nameLexems = append(nameLexems, s.breakInitialism(string(match.body))) + if firstMatch := lastAcceptedMatch.isZero(); firstMatch { + s.appendBrokenDownCasualString(nameLexems, nameRunes[:match.start]) + *nameLexems = append(*nameLexems, s.breakInitialism(string(match.body))) lastAcceptedMatch = match @@ -169,63 +318,66 @@ func (s *splitter) mapMatchesToNameLexems(nameRunes []rune, matches initialismMa } middle := nameRunes[lastAcceptedMatch.end+1 : match.start] - nameLexems = append(nameLexems, s.breakCasualString(middle)...) - nameLexems = append(nameLexems, s.breakInitialism(string(match.body))) + s.appendBrokenDownCasualString(nameLexems, middle) + *nameLexems = append(*nameLexems, s.breakInitialism(string(match.body))) lastAcceptedMatch = match } // we have not found any accepted matches - if lastAcceptedMatch == nil { - return s.breakCasualString(nameRunes) + if lastAcceptedMatch.isZero() { + *nameLexems = (*nameLexems)[:0] + s.appendBrokenDownCasualString(nameLexems, nameRunes) + } else if lastAcceptedMatch.end+1 != len(nameRunes) { + rest := nameRunes[lastAcceptedMatch.end+1:] + s.appendBrokenDownCasualString(nameLexems, rest) } - if lastAcceptedMatch.end+1 != len(nameRunes) { - rest := nameRunes[lastAcceptedMatch.end+1:] - nameLexems = append(nameLexems, s.breakCasualString(rest)...) 
- } + poolOfMatches.RedeemMatches(matches) return nameLexems } -func (s *splitter) initialismRuneEqual(a, b rune) bool { - return a == b -} - -func (s *splitter) breakInitialism(original string) nameLexem { +func (s splitter) breakInitialism(original string) nameLexem { return newInitialismNameLexem(original, original) } -func (s *splitter) breakCasualString(str []rune) []nameLexem { - segments := make([]nameLexem, 0) - currentSegment := "" +func (s splitter) appendBrokenDownCasualString(segments *[]nameLexem, str []rune) { + currentSegment := poolOfBuffers.BorrowBuffer(len(str)) // unlike strings.Builder, bytes.Buffer initial storage can reused + defer func() { + poolOfBuffers.RedeemBuffer(currentSegment) + }() addCasualNameLexem := func(original string) { - segments = append(segments, newCasualNameLexem(original)) + *segments = append(*segments, newCasualNameLexem(original)) } addInitialismNameLexem := func(original, match string) { - segments = append(segments, newInitialismNameLexem(original, match)) + *segments = append(*segments, newInitialismNameLexem(original, match)) } - addNameLexem := func(original string) { - if s.postSplitInitialismCheck { - for _, initialism := range s.initialisms { - if upper(initialism) == upper(original) { - addInitialismNameLexem(original, initialism) + var addNameLexem func(string) + if s.postSplitInitialismCheck { + addNameLexem = func(original string) { + for i := range s.initialisms { + if isEqualFoldIgnoreSpace(s.initialismsUpperCased[i], original) { + addInitialismNameLexem(original, s.initialisms[i]) + return } } - } - addCasualNameLexem(original) + addCasualNameLexem(original) + } + } else { + addNameLexem = addCasualNameLexem } - for _, rn := range string(str) { - if replace, found := nameReplaceTable[rn]; found { - if currentSegment != "" { - addNameLexem(currentSegment) - currentSegment = "" + for _, rn := range str { + if replace, found := nameReplaceTable(rn); found { + if currentSegment.Len() > 0 { + addNameLexem(currentSegment.String()) + currentSegment.Reset() } if replace != "" { @@ -236,27 +388,121 @@ func (s *splitter) breakCasualString(str []rune) []nameLexem { } if !unicode.In(rn, unicode.L, unicode.M, unicode.N, unicode.Pc) { - if currentSegment != "" { - addNameLexem(currentSegment) - currentSegment = "" + if currentSegment.Len() > 0 { + addNameLexem(currentSegment.String()) + currentSegment.Reset() } continue } if unicode.IsUpper(rn) { - if currentSegment != "" { - addNameLexem(currentSegment) + if currentSegment.Len() > 0 { + addNameLexem(currentSegment.String()) } - currentSegment = "" + currentSegment.Reset() } - currentSegment += string(rn) + currentSegment.WriteRune(rn) } - if currentSegment != "" { - addNameLexem(currentSegment) + if currentSegment.Len() > 0 { + addNameLexem(currentSegment.String()) } - - return segments +} + +// isEqualFoldIgnoreSpace is the same as strings.EqualFold, but +// it ignores leading and trailing blank spaces in the compared +// string. +// +// base is assumed to be composed of upper-cased runes, and be already +// trimmed. +// +// This code is heavily inspired from strings.EqualFold. 
+func isEqualFoldIgnoreSpace(base []rune, str string) bool { + var i, baseIndex int + // equivalent to b := []byte(str), but without data copy + b := hackStringBytes(str) + + for i < len(b) { + if c := b[i]; c < utf8.RuneSelf { + // fast path for ASCII + if c != ' ' && c != '\t' { + break + } + i++ + + continue + } + + // unicode case + r, size := utf8.DecodeRune(b[i:]) + if !unicode.IsSpace(r) { + break + } + i += size + } + + if i >= len(b) { + return len(base) == 0 + } + + for _, baseRune := range base { + if i >= len(b) { + break + } + + if c := b[i]; c < utf8.RuneSelf { + // single byte rune case (ASCII) + if baseRune >= utf8.RuneSelf { + return false + } + + baseChar := byte(baseRune) + if c != baseChar && + !('a' <= c && c <= 'z' && c-'a'+'A' == baseChar) { + return false + } + + baseIndex++ + i++ + + continue + } + + // unicode case + r, size := utf8.DecodeRune(b[i:]) + if unicode.ToUpper(r) != baseRune { + return false + } + baseIndex++ + i += size + } + + if baseIndex != len(base) { + return false + } + + // all passed: now we should only have blanks + for i < len(b) { + if c := b[i]; c < utf8.RuneSelf { + // fast path for ASCII + if c != ' ' && c != '\t' { + return false + } + i++ + + continue + } + + // unicode case + r, size := utf8.DecodeRune(b[i:]) + if !unicode.IsSpace(r) { + return false + } + + i += size + } + + return true } diff --git a/vendor/github.com/go-openapi/swag/string_bytes.go b/vendor/github.com/go-openapi/swag/string_bytes.go new file mode 100644 index 000000000..90745d5ca --- /dev/null +++ b/vendor/github.com/go-openapi/swag/string_bytes.go @@ -0,0 +1,8 @@ +package swag + +import "unsafe" + +// hackStringBytes returns the (unsafe) underlying bytes slice of a string. +func hackStringBytes(str string) []byte { + return unsafe.Slice(unsafe.StringData(str), len(str)) +} diff --git a/vendor/github.com/go-openapi/swag/util.go b/vendor/github.com/go-openapi/swag/util.go index d971fbe34..5051401c4 100644 --- a/vendor/github.com/go-openapi/swag/util.go +++ b/vendor/github.com/go-openapi/swag/util.go @@ -18,76 +18,25 @@ "reflect" "strings" "unicode" + "unicode/utf8" ) -// commonInitialisms are common acronyms that are kept as whole uppercased words. -var commonInitialisms *indexOfInitialisms - -// initialisms is a slice of sorted initialisms -var initialisms []string - -var isInitialism func(string) bool - // GoNamePrefixFunc sets an optional rule to prefix go names // which do not start with a letter. // +// The prefix function is assumed to return a string that starts with an upper case letter. +// // e.g. 
to help convert "123" into "{prefix}123" // // The default is to prefix with "X" var GoNamePrefixFunc func(string) string -func init() { - // Taken from https://github.com/golang/lint/blob/3390df4df2787994aea98de825b964ac7944b817/lint.go#L732-L769 - var configuredInitialisms = map[string]bool{ - "ACL": true, - "API": true, - "ASCII": true, - "CPU": true, - "CSS": true, - "DNS": true, - "EOF": true, - "GUID": true, - "HTML": true, - "HTTPS": true, - "HTTP": true, - "ID": true, - "IP": true, - "IPv4": true, - "IPv6": true, - "JSON": true, - "LHS": true, - "OAI": true, - "QPS": true, - "RAM": true, - "RHS": true, - "RPC": true, - "SLA": true, - "SMTP": true, - "SQL": true, - "SSH": true, - "TCP": true, - "TLS": true, - "TTL": true, - "UDP": true, - "UI": true, - "UID": true, - "UUID": true, - "URI": true, - "URL": true, - "UTF8": true, - "VM": true, - "XML": true, - "XMPP": true, - "XSRF": true, - "XSS": true, +func prefixFunc(name, in string) string { + if GoNamePrefixFunc == nil { + return "X" + in } - // a thread-safe index of initialisms - commonInitialisms = newIndexOfInitialisms().load(configuredInitialisms) - initialisms = commonInitialisms.sorted() - - // a test function - isInitialism = commonInitialisms.isInitialism + return GoNamePrefixFunc(name) + in } const ( @@ -156,25 +105,9 @@ func SplitByFormat(data, format string) []string { return result } -type byInitialism []string - -func (s byInitialism) Len() int { - return len(s) -} -func (s byInitialism) Swap(i, j int) { - s[i], s[j] = s[j], s[i] -} -func (s byInitialism) Less(i, j int) bool { - if len(s[i]) != len(s[j]) { - return len(s[i]) < len(s[j]) - } - - return strings.Compare(s[i], s[j]) > 0 -} - // Removes leading whitespaces func trim(str string) string { - return strings.Trim(str, " ") + return strings.TrimSpace(str) } // Shortcut to strings.ToUpper() @@ -188,15 +121,20 @@ func lower(str string) string { } // Camelize an uppercased word -func Camelize(word string) (camelized string) { +func Camelize(word string) string { + camelized := poolOfBuffers.BorrowBuffer(len(word)) + defer func() { + poolOfBuffers.RedeemBuffer(camelized) + }() + for pos, ru := range []rune(word) { if pos > 0 { - camelized += string(unicode.ToLower(ru)) + camelized.WriteRune(unicode.ToLower(ru)) } else { - camelized += string(unicode.ToUpper(ru)) + camelized.WriteRune(unicode.ToUpper(ru)) } } - return + return camelized.String() } // ToFileName lowercases and underscores a go type name @@ -224,33 +162,40 @@ func ToCommandName(name string) string { // ToHumanNameLower represents a code name as a human series of words func ToHumanNameLower(name string) string { - in := newSplitter(withPostSplitInitialismCheck).split(name) - out := make([]string, 0, len(in)) + s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck) + in := s.split(name) + poolOfSplitters.RedeemSplitter(s) + out := make([]string, 0, len(*in)) - for _, w := range in { + for _, w := range *in { if !w.IsInitialism() { out = append(out, lower(w.GetOriginal())) } else { - out = append(out, w.GetOriginal()) + out = append(out, trim(w.GetOriginal())) } } + poolOfLexems.RedeemLexems(in) return strings.Join(out, " ") } // ToHumanNameTitle represents a code name as a human series of words with the first letters titleized func ToHumanNameTitle(name string) string { - in := newSplitter(withPostSplitInitialismCheck).split(name) + s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck) + in := s.split(name) + poolOfSplitters.RedeemSplitter(s) - out := make([]string, 0, len(in)) - 
for _, w := range in { - original := w.GetOriginal() + out := make([]string, 0, len(*in)) + for _, w := range *in { + original := trim(w.GetOriginal()) if !w.IsInitialism() { out = append(out, Camelize(original)) } else { out = append(out, original) } } + poolOfLexems.RedeemLexems(in) + return strings.Join(out, " ") } @@ -264,7 +209,7 @@ func ToJSONName(name string) string { out = append(out, lower(w)) continue } - out = append(out, Camelize(w)) + out = append(out, Camelize(trim(w))) } return strings.Join(out, "") } @@ -283,35 +228,70 @@ func ToVarName(name string) string { // ToGoName translates a swagger name which can be underscored or camel cased to a name that golint likes func ToGoName(name string) string { - lexems := newSplitter(withPostSplitInitialismCheck).split(name) + s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck) + lexems := s.split(name) + poolOfSplitters.RedeemSplitter(s) + defer func() { + poolOfLexems.RedeemLexems(lexems) + }() + lexemes := *lexems - result := "" - for _, lexem := range lexems { + if len(lexemes) == 0 { + return "" + } + + result := poolOfBuffers.BorrowBuffer(len(name)) + defer func() { + poolOfBuffers.RedeemBuffer(result) + }() + + // check if not starting with a letter, upper case + firstPart := lexemes[0].GetUnsafeGoName() + if lexemes[0].IsInitialism() { + firstPart = upper(firstPart) + } + + if c := firstPart[0]; c < utf8.RuneSelf { + // ASCII + switch { + case 'A' <= c && c <= 'Z': + result.WriteString(firstPart) + case 'a' <= c && c <= 'z': + result.WriteByte(c - 'a' + 'A') + result.WriteString(firstPart[1:]) + default: + result.WriteString(prefixFunc(name, firstPart)) + // NOTE: no longer check if prefixFunc returns a string that starts with uppercase: + // assume this is always the case + } + } else { + // unicode + firstRune, _ := utf8.DecodeRuneInString(firstPart) + switch { + case !unicode.IsLetter(firstRune): + result.WriteString(prefixFunc(name, firstPart)) + case !unicode.IsUpper(firstRune): + result.WriteString(prefixFunc(name, firstPart)) + /* + result.WriteRune(unicode.ToUpper(firstRune)) + result.WriteString(firstPart[offset:]) + */ + default: + result.WriteString(firstPart) + } + } + + for _, lexem := range lexemes[1:] { goName := lexem.GetUnsafeGoName() // to support old behavior if lexem.IsInitialism() { goName = upper(goName) } - result += goName + result.WriteString(goName) } - if len(result) > 0 { - // Only prefix with X when the first character isn't an ascii letter - first := []rune(result)[0] - if !unicode.IsLetter(first) || (first > unicode.MaxASCII && !unicode.IsUpper(first)) { - if GoNamePrefixFunc == nil { - return "X" + result - } - result = GoNamePrefixFunc(name) + result - } - first = []rune(result)[0] - if unicode.IsLetter(first) && !unicode.IsUpper(first) { - result = string(append([]rune{unicode.ToUpper(first)}, []rune(result)[1:]...)) - } - } - - return result + return result.String() } // ContainsStrings searches a slice of strings for a case-sensitive match @@ -343,7 +323,7 @@ type zeroable interface { func IsZero(data interface{}) bool { v := reflect.ValueOf(data) // check for nil data - switch v.Kind() { + switch v.Kind() { //nolint:exhaustive case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: if v.IsNil() { return true @@ -356,7 +336,7 @@ func IsZero(data interface{}) bool { } // continue with slightly more complex reflection - switch v.Kind() { + switch v.Kind() { //nolint:exhaustive case reflect.String: return v.Len() == 0 case reflect.Bool: @@ -376,16 +356,6 @@ func IsZero(data 
interface{}) bool { } } -// AddInitialisms add additional initialisms -func AddInitialisms(words ...string) { - for _, word := range words { - // commonInitialisms[upper(word)] = true - commonInitialisms.add(upper(word)) - } - // sort again - initialisms = commonInitialisms.sorted() -} - // CommandLineOptionsGroup represents a group of user-defined command line options type CommandLineOptionsGroup struct { ShortDescription string diff --git a/vendor/github.com/go-openapi/swag/yaml.go b/vendor/github.com/go-openapi/swag/yaml.go index f09ee609f..f59e02593 100644 --- a/vendor/github.com/go-openapi/swag/yaml.go +++ b/vendor/github.com/go-openapi/swag/yaml.go @@ -16,8 +16,11 @@ import ( "encoding/json" + "errors" "fmt" "path/filepath" + "reflect" + "sort" "strconv" "github.com/mailru/easyjson/jlexer" @@ -48,7 +51,7 @@ func BytesToYAMLDoc(data []byte) (interface{}, error) { return nil, err } if document.Kind != yaml.DocumentNode || len(document.Content) != 1 || document.Content[0].Kind != yaml.MappingNode { - return nil, fmt.Errorf("only YAML documents that are objects are supported") + return nil, errors.New("only YAML documents that are objects are supported") } return &document, nil } @@ -147,7 +150,7 @@ func yamlScalar(node *yaml.Node) (interface{}, error) { case yamlTimestamp: return node.Value, nil case yamlNull: - return nil, nil + return nil, nil //nolint:nilnil default: return nil, fmt.Errorf("YAML tag %q is not supported", node.LongTag()) } @@ -245,7 +248,27 @@ func (s JSONMapSlice) MarshalYAML() (interface{}, error) { return yaml.Marshal(&n) } +func isNil(input interface{}) bool { + if input == nil { + return true + } + kind := reflect.TypeOf(input).Kind() + switch kind { //nolint:exhaustive + case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan: + return reflect.ValueOf(input).IsNil() + default: + return false + } +} + func json2yaml(item interface{}) (*yaml.Node, error) { + if isNil(item) { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Value: "null", + }, nil + } + switch val := item.(type) { case JSONMapSlice: var n yaml.Node @@ -265,7 +288,14 @@ func json2yaml(item interface{}) (*yaml.Node, error) { case map[string]interface{}: var n yaml.Node n.Kind = yaml.MappingNode - for k, v := range val { + keys := make([]string, 0, len(val)) + for k := range val { + keys = append(keys, k) + } + sort.Strings(keys) + + for _, k := range keys { + v := val[k] childNode, err := json2yaml(v) if err != nil { return nil, err @@ -318,8 +348,9 @@ func json2yaml(item interface{}) (*yaml.Node, error) { Tag: yamlBoolScalar, Value: strconv.FormatBool(val), }, nil + default: + return nil, fmt.Errorf("unhandled type: %T", val) } - return nil, nil } // JSONMapItem represents the value of a key in a JSON object held by JSONMapSlice diff --git a/vendor/github.com/go-openapi/validate/.golangci.yml b/vendor/github.com/go-openapi/validate/.golangci.yml index 81818ca67..22f8d21cc 100644 --- a/vendor/github.com/go-openapi/validate/.golangci.yml +++ b/vendor/github.com/go-openapi/validate/.golangci.yml @@ -1,12 +1,14 @@ linters-settings: govet: check-shadowing: true + golint: + min-confidence: 0 gocyclo: - min-complexity: 50 + min-complexity: 45 maligned: suggest-new: true dupl: - threshold: 100 + threshold: 200 goconst: min-len: 2 min-occurrences: 3 @@ -15,36 +17,45 @@ linters: enable-all: true disable: - maligned + - unparam - lll + - gochecknoinits + - gochecknoglobals + - funlen - godox - gocognit - whitespace - wsl - - funlen - - gochecknoglobals - - gochecknoinits - - scopelint - wrapcheck - - 
exhaustivestruct - - exhaustive - - nlreturn - testpackage - - gci - - gofumpt - - goerr113 + - nlreturn - gomnd - - tparallel + - exhaustivestruct + - goerr113 + - errorlint - nestif - godot - - tparallel + - gofumpt - paralleltest - - cyclop # because we have gocyclo already - # TODO: review the linters below. We disabled them to make the CI pass first. - - ireturn - - varnamelen - - forcetypeassert + - tparallel - thelper - # Disable deprecated linters. - # They will be removed from golangci-lint in future. + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode - interfacer - - golint \ No newline at end of file + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/validate/BENCHMARK.md b/vendor/github.com/go-openapi/validate/BENCHMARK.md new file mode 100644 index 000000000..79cf6a077 --- /dev/null +++ b/vendor/github.com/go-openapi/validate/BENCHMARK.md @@ -0,0 +1,31 @@ +# Benchmark + +Validating the Kubernetes Swagger API + +## v0.22.6: 60,000,000 allocs +``` +goos: linux +goarch: amd64 +pkg: github.com/go-openapi/validate +cpu: AMD Ryzen 7 5800X 8-Core Processor +Benchmark_KubernetesSpec/validating_kubernetes_API-16 1 8549863982 ns/op 7067424936 B/op 59583275 allocs/op +``` + +## After refact PR: minor but noticable improvements: 25,000,000 allocs +``` +go test -bench Spec +goos: linux +goarch: amd64 +pkg: github.com/go-openapi/validate +cpu: AMD Ryzen 7 5800X 8-Core Processor +Benchmark_KubernetesSpec/validating_kubernetes_API-16 1 4064535557 ns/op 3379715592 B/op 25320330 allocs/op +``` + +## After reduce GC pressure PR: 17,000,000 allocs +``` +goos: linux +goarch: amd64 +pkg: github.com/go-openapi/validate +cpu: AMD Ryzen 7 5800X 8-Core Processor +Benchmark_KubernetesSpec/validating_kubernetes_API-16 1 3758414145 ns/op 2593881496 B/op 17111373 allocs/op +``` diff --git a/vendor/github.com/go-openapi/validate/README.md b/vendor/github.com/go-openapi/validate/README.md index ea2d68cb6..e8e1bb218 100644 --- a/vendor/github.com/go-openapi/validate/README.md +++ b/vendor/github.com/go-openapi/validate/README.md @@ -1,7 +1,5 @@ -# Validation helpers -[![Build Status](https://travis-ci.org/go-openapi/validate.svg?branch=master)](https://travis-ci.org/go-openapi/validate) -[![Build status](https://ci.appveyor.com/api/projects/status/d6epy6vipueyh5fs/branch/master?svg=true)](https://ci.appveyor.com/project/fredbi/validate/branch/master) -[![codecov](https://codecov.io/gh/go-openapi/validate/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/validate) +# Validation helpers [![Build Status](https://github.com/go-openapi/validate/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/validate/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/validate/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/validate) + [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) [![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/validate/master/LICENSE) [![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/validate.svg)](https://pkg.go.dev/github.com/go-openapi/validate) @@ -24,7 +22,7 @@ Reference can be found here: https://github.com/OAI/OpenAPI-Specification/blob/m * Minimum, Maximum, MultipleOf * FormatOf 
-[Documentation](https://godoc.org/github.com/go-openapi/validate) +[Documentation](https://pkg.go.dev/github.com/go-openapi/validate) ## FAQ diff --git a/vendor/github.com/go-openapi/validate/appveyor.yml b/vendor/github.com/go-openapi/validate/appveyor.yml deleted file mode 100644 index 89e5bccb3..000000000 --- a/vendor/github.com/go-openapi/validate/appveyor.yml +++ /dev/null @@ -1,32 +0,0 @@ -version: "0.1.{build}" - -clone_folder: C:\go-openapi\validate -shallow_clone: true # for startup speed -pull_requests: - do_not_increment_build_number: true - -#skip_tags: true -#skip_branch_with_pr: true - -# appveyor.yml -build: off - -environment: - GOPATH: c:\gopath - -stack: go 1.15 - -test_script: - - go test -v -timeout 20m -args -enable-long ./... - -deploy: off - -notifications: - - provider: Slack - incoming_webhook: https://hooks.slack.com/services/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 - auth_token: - secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4= - channel: bots - on_build_success: false - on_build_failure: true - on_build_status_changed: true diff --git a/vendor/github.com/go-openapi/validate/default_validator.go b/vendor/github.com/go-openapi/validate/default_validator.go index bd14c2a26..e0dd93839 100644 --- a/vendor/github.com/go-openapi/validate/default_validator.go +++ b/vendor/github.com/go-openapi/validate/default_validator.go @@ -25,48 +25,55 @@ // According to Swagger spec, default values MUST validate their schema. type defaultValidator struct { SpecValidator *SpecValidator - visitedSchemas map[string]bool + visitedSchemas map[string]struct{} + schemaOptions *SchemaValidatorOptions } // resetVisited resets the internal state of visited schemas func (d *defaultValidator) resetVisited() { - d.visitedSchemas = map[string]bool{} + if d.visitedSchemas == nil { + d.visitedSchemas = make(map[string]struct{}) + + return + } + + // TODO(go1.21): clear(ex.visitedSchemas) + for k := range d.visitedSchemas { + delete(d.visitedSchemas, k) + } } -func isVisited(path string, visitedSchemas map[string]bool) bool { - found := visitedSchemas[path] - if !found { - // search for overlapping paths - frags := strings.Split(path, ".") - if len(frags) < 2 { - // shortcut exit on smaller paths - return found +func isVisited(path string, visitedSchemas map[string]struct{}) bool { + _, found := visitedSchemas[path] + if found { + return true + } + + // search for overlapping paths + var ( + parent string + suffix string + ) + for i := len(path) - 2; i >= 0; i-- { + r := path[i] + if r != '.' 
{ + continue } - last := len(frags) - 1 - var currentFragStr, parent string - for i := range frags { - if i == 0 { - currentFragStr = frags[last] - } else { - currentFragStr = strings.Join([]string{frags[last-i], currentFragStr}, ".") - } - if i < last { - parent = strings.Join(frags[0:last-i], ".") - } else { - parent = "" - } - if strings.HasSuffix(parent, currentFragStr) { - found = true - break - } + + parent = path[0:i] + suffix = path[i+1:] + + if strings.HasSuffix(parent, suffix) { + return true } } - return found + + return false } // beingVisited asserts a schema is being visited func (d *defaultValidator) beingVisited(path string) { - d.visitedSchemas[path] = true + d.visitedSchemas[path] = struct{}{} } // isVisited tells if a path has already been visited @@ -75,8 +82,9 @@ func (d *defaultValidator) isVisited(path string) bool { } // Validate validates the default values declared in the swagger spec -func (d *defaultValidator) Validate() (errs *Result) { - errs = new(Result) +func (d *defaultValidator) Validate() *Result { + errs := pools.poolOfResults.BorrowResult() // will redeem when merged + if d == nil || d.SpecValidator == nil { return errs } @@ -89,7 +97,7 @@ func (d *defaultValidator) validateDefaultValueValidAgainstSchema() *Result { // every default value that is specified must validate against the schema for that property // headers, items, parameters, schema - res := new(Result) + res := pools.poolOfResults.BorrowResult() // will redeem when merged s := d.SpecValidator for method, pathItem := range s.expandedAnalyzer().Operations() { @@ -107,10 +115,12 @@ func (d *defaultValidator) validateDefaultValueValidAgainstSchema() *Result { // default values provided must validate against their inline definition (no explicit schema) if param.Default != nil && param.Schema == nil { // check param default value is valid - red := NewParamValidator(¶m, s.KnownFormats).Validate(param.Default) //#nosec + red := newParamValidator(¶m, s.KnownFormats, d.schemaOptions).Validate(param.Default) //#nosec if red.HasErrorsOrWarnings() { res.AddErrors(defaultValueDoesNotValidateMsg(param.Name, param.In)) res.Merge(red) + } else if red.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(red) } } @@ -120,6 +130,8 @@ func (d *defaultValidator) validateDefaultValueValidAgainstSchema() *Result { if red.HasErrorsOrWarnings() { res.AddErrors(defaultValueItemsDoesNotValidateMsg(param.Name, param.In)) res.Merge(red) + } else if red.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(red) } } @@ -129,6 +141,8 @@ func (d *defaultValidator) validateDefaultValueValidAgainstSchema() *Result { if red.HasErrorsOrWarnings() { res.AddErrors(defaultValueDoesNotValidateMsg(param.Name, param.In)) res.Merge(red) + } else if red.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(red) } } } @@ -154,7 +168,7 @@ func (d *defaultValidator) validateDefaultValueValidAgainstSchema() *Result { // reset explored schemas to get depth-first recursive-proof exploration d.resetVisited() for nm, sch := range s.spec.Spec().Definitions { - res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("definitions.%s", nm), "body", &sch)) //#nosec + res.Merge(d.validateDefaultValueSchemaAgainstSchema("definitions."+nm, "body", &sch)) //#nosec } } return res @@ -170,17 +184,18 @@ func (d *defaultValidator) validateDefaultInResponse(resp *spec.Response, respon responseName, responseCodeAsStr := responseHelp.responseMsgVariants(responseType, responseCode) - // nolint: dupl if response.Headers != nil { // Safeguard for 
nm, h := range response.Headers { // reset explored schemas to get depth-first recursive-proof exploration d.resetVisited() if h.Default != nil { - red := NewHeaderValidator(nm, &h, s.KnownFormats).Validate(h.Default) //#nosec + red := newHeaderValidator(nm, &h, s.KnownFormats, d.schemaOptions).Validate(h.Default) //#nosec if red.HasErrorsOrWarnings() { res.AddErrors(defaultValueHeaderDoesNotValidateMsg(operationID, nm, responseName)) res.Merge(red) + } else if red.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(red) } } @@ -190,6 +205,8 @@ func (d *defaultValidator) validateDefaultInResponse(resp *spec.Response, respon if red.HasErrorsOrWarnings() { res.AddErrors(defaultValueHeaderItemsDoesNotValidateMsg(operationID, nm, responseName)) res.Merge(red) + } else if red.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(red) } } @@ -209,6 +226,8 @@ func (d *defaultValidator) validateDefaultInResponse(resp *spec.Response, respon // Additional message to make sure the context of the error is not lost res.AddErrors(defaultValueInDoesNotValidateMsg(operationID, responseName)) res.Merge(red) + } else if red.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(red) } } return res @@ -220,11 +239,13 @@ func (d *defaultValidator) validateDefaultValueSchemaAgainstSchema(path, in stri return nil } d.beingVisited(path) - res := new(Result) + res := pools.poolOfResults.BorrowResult() s := d.SpecValidator if schema.Default != nil { - res.Merge(NewSchemaValidator(schema, s.spec.Spec(), path+".default", s.KnownFormats, SwaggerSchema(true)).Validate(schema.Default)) + res.Merge( + newSchemaValidator(schema, s.spec.Spec(), path+".default", s.KnownFormats, d.schemaOptions).Validate(schema.Default), + ) } if schema.Items != nil { if schema.Items.Schema != nil { @@ -242,7 +263,7 @@ func (d *defaultValidator) validateDefaultValueSchemaAgainstSchema(path, in stri } if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil { // NOTE: we keep validating values, even though additionalItems is not supported by Swagger 2.0 (and 3.0 as well) - res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("%s.additionalItems", path), in, schema.AdditionalItems.Schema)) + res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+".additionalItems", in, schema.AdditionalItems.Schema)) } for propName, prop := range schema.Properties { res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec @@ -251,7 +272,7 @@ func (d *defaultValidator) validateDefaultValueSchemaAgainstSchema(path, in stri res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec } if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil { - res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("%s.additionalProperties", path), in, schema.AdditionalProperties.Schema)) + res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+".additionalProperties", in, schema.AdditionalProperties.Schema)) } if schema.AllOf != nil { for i, aoSch := range schema.AllOf { @@ -262,13 +283,15 @@ func (d *defaultValidator) validateDefaultValueSchemaAgainstSchema(path, in stri } // TODO: Temporary duplicated code. 
Need to refactor with examples -// nolint: dupl + func (d *defaultValidator) validateDefaultValueItemsAgainstSchema(path, in string, root interface{}, items *spec.Items) *Result { - res := new(Result) + res := pools.poolOfResults.BorrowResult() s := d.SpecValidator if items != nil { if items.Default != nil { - res.Merge(newItemsValidator(path, in, items, root, s.KnownFormats).Validate(0, items.Default)) + res.Merge( + newItemsValidator(path, in, items, root, s.KnownFormats, d.schemaOptions).Validate(0, items.Default), + ) } if items.Items != nil { res.Merge(d.validateDefaultValueItemsAgainstSchema(path+"[0].default", in, root, items.Items)) diff --git a/vendor/github.com/go-openapi/validate/doc.go b/vendor/github.com/go-openapi/validate/doc.go index f5ca9a5d5..d2b901eab 100644 --- a/vendor/github.com/go-openapi/validate/doc.go +++ b/vendor/github.com/go-openapi/validate/doc.go @@ -19,7 +19,7 @@ This package follows Swagger 2.0. specification (aka OpenAPI 2.0). Reference can be found here: https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md. -Validating a specification +# Validating a specification Validates a spec document (from JSON or YAML) against the JSON schema for swagger, then checks a number of extra rules that can't be expressed in JSON schema. @@ -30,34 +30,36 @@ - SpecValidator.Validate() Reported as errors: - [x] definition can't declare a property that's already defined by one of its ancestors - [x] definition's ancestor can't be a descendant of the same model - [x] path uniqueness: each api path should be non-verbatim (account for path param names) unique per method - [x] each security reference should contain only unique scopes - [x] each security scope in a security definition should be unique - [x] parameters in path must be unique - [x] each path parameter must correspond to a parameter placeholder and vice versa - [x] each referenceable definition must have references - [x] each definition property listed in the required array must be defined in the properties of the model - [x] each parameter should have a unique `name` and `type` combination - [x] each operation should have only 1 parameter of type body - [x] each reference must point to a valid object - [x] every default value that is specified must validate against the schema for that property - [x] items property is required for all schemas/definitions of type `array` - [x] path parameters must be declared a required - [x] headers must not contain $ref - [x] schema and property examples provided must validate against their respective object's schema - [x] examples provided must validate their schema + + [x] definition can't declare a property that's already defined by one of its ancestors + [x] definition's ancestor can't be a descendant of the same model + [x] path uniqueness: each api path should be non-verbatim (account for path param names) unique per method. Validation can be laxed by disabling StrictPathParamUniqueness. 
+ [x] each security reference should contain only unique scopes + [x] each security scope in a security definition should be unique + [x] parameters in path must be unique + [x] each path parameter must correspond to a parameter placeholder and vice versa + [x] each referenceable definition must have references + [x] each definition property listed in the required array must be defined in the properties of the model + [x] each parameter should have a unique `name` and `type` combination + [x] each operation should have only 1 parameter of type body + [x] each reference must point to a valid object + [x] every default value that is specified must validate against the schema for that property + [x] items property is required for all schemas/definitions of type `array` + [x] path parameters must be declared a required + [x] headers must not contain $ref + [x] schema and property examples provided must validate against their respective object's schema + [x] examples provided must validate their schema Reported as warnings: - [x] path parameters should not contain any of [{,},\w] - [x] empty path - [x] unused definitions - [x] unsupported validation of examples on non-JSON media types - [x] examples in response without schema - [x] readOnly properties should not be required -Validating a schema + [x] path parameters should not contain any of [{,},\w] + [x] empty path + [x] unused definitions + [x] unsupported validation of examples on non-JSON media types + [x] examples in response without schema + [x] readOnly properties should not be required + +# Validating a schema The schema validation toolkit validates data against JSON-schema-draft 04 schema. @@ -70,16 +72,16 @@ - AgainstSchema() - ... -Known limitations +# Known limitations With the current version of this package, the following aspects of swagger are not yet supported: - [ ] errors and warnings are not reported with key/line number in spec - [ ] default values and examples on responses only support application/json producer type - [ ] invalid numeric constraints (such as Minimum, etc..) are not checked except for default and example values - [ ] rules for collectionFormat are not implemented - [ ] no validation rule for polymorphism support (discriminator) [not done here] - [ ] valid js ECMA regexp not supported by Go regexp engine are considered invalid - [ ] arbitrary large numbers are not supported: max is math.MaxFloat64 + [ ] errors and warnings are not reported with key/line number in spec + [ ] default values and examples on responses only support application/json producer type + [ ] invalid numeric constraints (such as Minimum, etc..) 
are not checked except for default and example values + [ ] rules for collectionFormat are not implemented + [ ] no validation rule for polymorphism support (discriminator) [not done here] + [ ] valid js ECMA regexp not supported by Go regexp engine are considered invalid + [ ] arbitrary large numbers are not supported: max is math.MaxFloat64 */ package validate diff --git a/vendor/github.com/go-openapi/validate/example_validator.go b/vendor/github.com/go-openapi/validate/example_validator.go index c8bffd78e..d08956973 100644 --- a/vendor/github.com/go-openapi/validate/example_validator.go +++ b/vendor/github.com/go-openapi/validate/example_validator.go @@ -23,17 +23,27 @@ // ExampleValidator validates example values defined in a spec type exampleValidator struct { SpecValidator *SpecValidator - visitedSchemas map[string]bool + visitedSchemas map[string]struct{} + schemaOptions *SchemaValidatorOptions } // resetVisited resets the internal state of visited schemas func (ex *exampleValidator) resetVisited() { - ex.visitedSchemas = map[string]bool{} + if ex.visitedSchemas == nil { + ex.visitedSchemas = make(map[string]struct{}) + + return + } + + // TODO(go1.21): clear(ex.visitedSchemas) + for k := range ex.visitedSchemas { + delete(ex.visitedSchemas, k) + } } // beingVisited asserts a schema is being visited func (ex *exampleValidator) beingVisited(path string) { - ex.visitedSchemas[path] = true + ex.visitedSchemas[path] = struct{}{} } // isVisited tells if a path has already been visited @@ -48,9 +58,9 @@ func (ex *exampleValidator) isVisited(path string) bool { // - schemas // - individual property // - responses -// -func (ex *exampleValidator) Validate() (errs *Result) { - errs = new(Result) +func (ex *exampleValidator) Validate() *Result { + errs := pools.poolOfResults.BorrowResult() + if ex == nil || ex.SpecValidator == nil { return errs } @@ -65,7 +75,7 @@ func (ex *exampleValidator) validateExampleValueValidAgainstSchema() *Result { // in: schemas, properties, object, items // not in: headers, parameters without schema - res := new(Result) + res := pools.poolOfResults.BorrowResult() s := ex.SpecValidator for method, pathItem := range s.expandedAnalyzer().Operations() { @@ -83,10 +93,12 @@ func (ex *exampleValidator) validateExampleValueValidAgainstSchema() *Result { // default values provided must validate against their inline definition (no explicit schema) if param.Example != nil && param.Schema == nil { // check param default value is valid - red := NewParamValidator(¶m, s.KnownFormats).Validate(param.Example) //#nosec + red := newParamValidator(¶m, s.KnownFormats, ex.schemaOptions).Validate(param.Example) //#nosec if red.HasErrorsOrWarnings() { res.AddWarnings(exampleValueDoesNotValidateMsg(param.Name, param.In)) res.MergeAsWarnings(red) + } else if red.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(red) } } @@ -96,6 +108,8 @@ func (ex *exampleValidator) validateExampleValueValidAgainstSchema() *Result { if red.HasErrorsOrWarnings() { res.AddWarnings(exampleValueItemsDoesNotValidateMsg(param.Name, param.In)) res.Merge(red) + } else if red.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(red) } } @@ -105,6 +119,8 @@ func (ex *exampleValidator) validateExampleValueValidAgainstSchema() *Result { if red.HasErrorsOrWarnings() { res.AddWarnings(exampleValueDoesNotValidateMsg(param.Name, param.In)) res.Merge(red) + } else if red.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(red) } } } @@ -130,7 +146,7 @@ func (ex *exampleValidator) 
validateExampleValueValidAgainstSchema() *Result { // reset explored schemas to get depth-first recursive-proof exploration ex.resetVisited() for nm, sch := range s.spec.Spec().Definitions { - res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("definitions.%s", nm), "body", &sch)) //#nosec + res.Merge(ex.validateExampleValueSchemaAgainstSchema("definitions."+nm, "body", &sch)) //#nosec } } return res @@ -146,17 +162,18 @@ func (ex *exampleValidator) validateExampleInResponse(resp *spec.Response, respo responseName, responseCodeAsStr := responseHelp.responseMsgVariants(responseType, responseCode) - // nolint: dupl if response.Headers != nil { // Safeguard for nm, h := range response.Headers { // reset explored schemas to get depth-first recursive-proof exploration ex.resetVisited() if h.Example != nil { - red := NewHeaderValidator(nm, &h, s.KnownFormats).Validate(h.Example) //#nosec + red := newHeaderValidator(nm, &h, s.KnownFormats, ex.schemaOptions).Validate(h.Example) //#nosec if red.HasErrorsOrWarnings() { res.AddWarnings(exampleValueHeaderDoesNotValidateMsg(operationID, nm, responseName)) res.MergeAsWarnings(red) + } else if red.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(red) } } @@ -166,6 +183,8 @@ func (ex *exampleValidator) validateExampleInResponse(resp *spec.Response, respo if red.HasErrorsOrWarnings() { res.AddWarnings(exampleValueHeaderItemsDoesNotValidateMsg(operationID, nm, responseName)) res.MergeAsWarnings(red) + } else if red.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(red) } } @@ -185,13 +204,17 @@ func (ex *exampleValidator) validateExampleInResponse(resp *spec.Response, respo // Additional message to make sure the context of the error is not lost res.AddWarnings(exampleValueInDoesNotValidateMsg(operationID, responseName)) res.Merge(red) + } else if red.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(red) } } if response.Examples != nil { if response.Schema != nil { if example, ok := response.Examples["application/json"]; ok { - res.MergeAsWarnings(NewSchemaValidator(response.Schema, s.spec.Spec(), path+".examples", s.KnownFormats, SwaggerSchema(true)).Validate(example)) + res.MergeAsWarnings( + newSchemaValidator(response.Schema, s.spec.Spec(), path+".examples", s.KnownFormats, s.schemaOptions).Validate(example), + ) } else { // TODO: validate other media types too res.AddWarnings(examplesMimeNotSupportedMsg(operationID, responseName)) @@ -210,10 +233,12 @@ func (ex *exampleValidator) validateExampleValueSchemaAgainstSchema(path, in str } ex.beingVisited(path) s := ex.SpecValidator - res := new(Result) + res := pools.poolOfResults.BorrowResult() if schema.Example != nil { - res.MergeAsWarnings(NewSchemaValidator(schema, s.spec.Spec(), path+".example", s.KnownFormats, SwaggerSchema(true)).Validate(schema.Example)) + res.MergeAsWarnings( + newSchemaValidator(schema, s.spec.Spec(), path+".example", s.KnownFormats, ex.schemaOptions).Validate(schema.Example), + ) } if schema.Items != nil { if schema.Items.Schema != nil { @@ -231,7 +256,7 @@ func (ex *exampleValidator) validateExampleValueSchemaAgainstSchema(path, in str } if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil { // NOTE: we keep validating values, even though additionalItems is unsupported in Swagger 2.0 (and 3.0 as well) - res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("%s.additionalItems", path), in, schema.AdditionalItems.Schema)) + res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+".additionalItems", in, 
schema.AdditionalItems.Schema)) } for propName, prop := range schema.Properties { res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec @@ -240,7 +265,7 @@ func (ex *exampleValidator) validateExampleValueSchemaAgainstSchema(path, in str res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec } if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil { - res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("%s.additionalProperties", path), in, schema.AdditionalProperties.Schema)) + res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+".additionalProperties", in, schema.AdditionalProperties.Schema)) } if schema.AllOf != nil { for i, aoSch := range schema.AllOf { @@ -251,13 +276,16 @@ func (ex *exampleValidator) validateExampleValueSchemaAgainstSchema(path, in str } // TODO: Temporary duplicated code. Need to refactor with examples -// nolint: dupl +// + func (ex *exampleValidator) validateExampleValueItemsAgainstSchema(path, in string, root interface{}, items *spec.Items) *Result { - res := new(Result) + res := pools.poolOfResults.BorrowResult() s := ex.SpecValidator if items != nil { if items.Example != nil { - res.MergeAsWarnings(newItemsValidator(path, in, items, root, s.KnownFormats).Validate(0, items.Example)) + res.MergeAsWarnings( + newItemsValidator(path, in, items, root, s.KnownFormats, ex.schemaOptions).Validate(0, items.Example), + ) } if items.Items != nil { res.Merge(ex.validateExampleValueItemsAgainstSchema(path+"[0].example", in, root, items.Items)) @@ -266,5 +294,6 @@ func (ex *exampleValidator) validateExampleValueItemsAgainstSchema(path, in stri res.AddErrors(invalidPatternInMsg(path, in, items.Pattern)) } } + return res } diff --git a/vendor/github.com/go-openapi/validate/formats.go b/vendor/github.com/go-openapi/validate/formats.go index 0ad996cbb..f4e355213 100644 --- a/vendor/github.com/go-openapi/validate/formats.go +++ b/vendor/github.com/go-openapi/validate/formats.go @@ -22,10 +22,32 @@ ) type formatValidator struct { - Format string Path string In string + Format string KnownFormats strfmt.Registry + Options *SchemaValidatorOptions +} + +func newFormatValidator(path, in, format string, formats strfmt.Registry, opts *SchemaValidatorOptions) *formatValidator { + if opts == nil { + opts = new(SchemaValidatorOptions) + } + + var f *formatValidator + if opts.recycleValidators { + f = pools.poolOfFormatValidators.BorrowValidator() + } else { + f = new(formatValidator) + } + + f.Path = path + f.In = in + f.Format = format + f.KnownFormats = formats + f.Options = opts + + return f } func (f *formatValidator) SetPath(path string) { @@ -33,37 +55,45 @@ func (f *formatValidator) SetPath(path string) { } func (f *formatValidator) Applies(source interface{}, kind reflect.Kind) bool { - doit := func() bool { - if source == nil { - return false - } - switch source := source.(type) { - case *spec.Items: - return kind == reflect.String && f.KnownFormats.ContainsName(source.Format) - case *spec.Parameter: - return kind == reflect.String && f.KnownFormats.ContainsName(source.Format) - case *spec.Schema: - return kind == reflect.String && f.KnownFormats.ContainsName(source.Format) - case *spec.Header: - return kind == reflect.String && f.KnownFormats.ContainsName(source.Format) - } + if source == nil || f.KnownFormats == nil { + return false + } + + switch source := source.(type) { + case *spec.Items: + return kind == reflect.String && 
f.KnownFormats.ContainsName(source.Format) + case *spec.Parameter: + return kind == reflect.String && f.KnownFormats.ContainsName(source.Format) + case *spec.Schema: + return kind == reflect.String && f.KnownFormats.ContainsName(source.Format) + case *spec.Header: + return kind == reflect.String && f.KnownFormats.ContainsName(source.Format) + default: return false } - r := doit() - debugLog("format validator for %q applies %t for %T (kind: %v)\n", f.Path, r, source, kind) - return r } func (f *formatValidator) Validate(val interface{}) *Result { - result := new(Result) - debugLog("validating \"%v\" against format: %s", val, f.Format) + if f.Options.recycleValidators { + defer func() { + f.redeem() + }() + } + + var result *Result + if f.Options.recycleResult { + result = pools.poolOfResults.BorrowResult() + } else { + result = new(Result) + } if err := FormatOf(f.Path, f.In, f.Format, val.(string), f.KnownFormats); err != nil { result.AddErrors(err) } - if result.HasErrors() { - return result - } - return nil + return result +} + +func (f *formatValidator) redeem() { + pools.poolOfFormatValidators.RedeemValidator(f) } diff --git a/vendor/github.com/go-openapi/validate/helpers.go b/vendor/github.com/go-openapi/validate/helpers.go index 48ebfab58..757e403d9 100644 --- a/vendor/github.com/go-openapi/validate/helpers.go +++ b/vendor/github.com/go-openapi/validate/helpers.go @@ -101,9 +101,17 @@ type errorHelper struct { // A collection of unexported helpers for error construction } -func (h *errorHelper) sErr(err errors.Error) *Result { +func (h *errorHelper) sErr(err errors.Error, recycle bool) *Result { // Builds a Result from standard errors.Error - return &Result{Errors: []error{err}} + var result *Result + if recycle { + result = pools.poolOfResults.BorrowResult() + } else { + result = new(Result) + } + result.Errors = []error{err} + + return result } func (h *errorHelper) addPointerError(res *Result, err error, ref string, fromPath string) *Result { @@ -157,7 +165,7 @@ func (h *valueHelper) asInt64(val interface{}) int64 { // Number conversion function for int64, without error checking // (implements an implicit type upgrade). v := reflect.ValueOf(val) - switch v.Kind() { + switch v.Kind() { //nolint:exhaustive case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: return v.Int() case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: @@ -174,7 +182,7 @@ func (h *valueHelper) asUint64(val interface{}) uint64 { // Number conversion function for uint64, without error checking // (implements an implicit type upgrade). v := reflect.ValueOf(val) - switch v.Kind() { + switch v.Kind() { //nolint:exhaustive case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: return uint64(v.Int()) case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: @@ -192,7 +200,7 @@ func (h *valueHelper) asFloat64(val interface{}) float64 { // Number conversion function for float64, without error checking // (implements an implicit type upgrade). 
v := reflect.ValueOf(val) - switch v.Kind() { + switch v.Kind() { //nolint:exhaustive case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: return float64(v.Int()) case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: @@ -225,7 +233,7 @@ func (h *paramHelper) safeExpandedParamsFor(path, method, operationID string, re operation.Parameters = resolvedParams for _, ppr := range s.expandedAnalyzer().SafeParamsFor(method, path, - func(p spec.Parameter, err error) bool { + func(_ spec.Parameter, err error) bool { // since params have already been expanded, there are few causes for error res.AddErrors(someParametersBrokenMsg(path, method, operationID)) // original error from analyzer @@ -250,7 +258,7 @@ func (h *paramHelper) resolveParam(path, method, operationID string, param *spec } if err != nil { // Safeguard - // NOTE: we may enter enter here when the whole parameter is an unresolved $ref + // NOTE: we may enter here when the whole parameter is an unresolved $ref refPath := strings.Join([]string{"\"" + path + "\"", method}, ".") errorHelp.addPointerError(res, err, param.Ref.String(), refPath) return nil, res @@ -306,6 +314,7 @@ func (r *responseHelper) expandResponseRef( errorHelp.addPointerError(res, err, response.Ref.String(), path) return nil, res } + return response, res } diff --git a/vendor/github.com/go-openapi/validate/object_validator.go b/vendor/github.com/go-openapi/validate/object_validator.go index 7bb12615d..dff73fa98 100644 --- a/vendor/github.com/go-openapi/validate/object_validator.go +++ b/vendor/github.com/go-openapi/validate/object_validator.go @@ -15,8 +15,8 @@ package validate import ( + "fmt" "reflect" - "regexp" "strings" "github.com/go-openapi/errors" @@ -35,62 +35,116 @@ type objectValidator struct { PatternProperties map[string]spec.Schema Root interface{} KnownFormats strfmt.Registry - Options SchemaValidatorOptions + Options *SchemaValidatorOptions + splitPath []string +} + +func newObjectValidator(path, in string, + maxProperties, minProperties *int64, required []string, properties spec.SchemaProperties, + additionalProperties *spec.SchemaOrBool, patternProperties spec.SchemaProperties, + root interface{}, formats strfmt.Registry, opts *SchemaValidatorOptions) *objectValidator { + if opts == nil { + opts = new(SchemaValidatorOptions) + } + + var v *objectValidator + if opts.recycleValidators { + v = pools.poolOfObjectValidators.BorrowValidator() + } else { + v = new(objectValidator) + } + + v.Path = path + v.In = in + v.MaxProperties = maxProperties + v.MinProperties = minProperties + v.Required = required + v.Properties = properties + v.AdditionalProperties = additionalProperties + v.PatternProperties = patternProperties + v.Root = root + v.KnownFormats = formats + v.Options = opts + v.splitPath = strings.Split(v.Path, ".") + + return v } func (o *objectValidator) SetPath(path string) { o.Path = path + o.splitPath = strings.Split(path, ".") } func (o *objectValidator) Applies(source interface{}, kind reflect.Kind) bool { // TODO: this should also work for structs // there is a problem in the type validator where it will be unhappy about null values // so that requires more testing - r := reflect.TypeOf(source) == specSchemaType && (kind == reflect.Map || kind == reflect.Struct) - debugLog("object validator for %q applies %t for %T (kind: %v)\n", o.Path, r, source, kind) - return r + _, isSchema := source.(*spec.Schema) + return isSchema && (kind == reflect.Map || kind == reflect.Struct) } func (o 
*objectValidator) isProperties() bool { - p := strings.Split(o.Path, ".") + p := o.splitPath return len(p) > 1 && p[len(p)-1] == jsonProperties && p[len(p)-2] != jsonProperties } func (o *objectValidator) isDefault() bool { - p := strings.Split(o.Path, ".") + p := o.splitPath return len(p) > 1 && p[len(p)-1] == jsonDefault && p[len(p)-2] != jsonDefault } func (o *objectValidator) isExample() bool { - p := strings.Split(o.Path, ".") + p := o.splitPath return len(p) > 1 && (p[len(p)-1] == swaggerExample || p[len(p)-1] == swaggerExamples) && p[len(p)-2] != swaggerExample } func (o *objectValidator) checkArrayMustHaveItems(res *Result, val map[string]interface{}) { // for swagger 2.0 schemas, there is an additional constraint to have array items defined explicitly. // with pure jsonschema draft 4, one may have arrays with undefined items (i.e. any type). - if t, typeFound := val[jsonType]; typeFound { - if tpe, ok := t.(string); ok && tpe == arrayType { - if item, itemsKeyFound := val[jsonItems]; !itemsKeyFound { - res.AddErrors(errors.Required(jsonItems, o.Path, item)) - } - } + if val == nil { + return } + + t, typeFound := val[jsonType] + if !typeFound { + return + } + + tpe, isString := t.(string) + if !isString || tpe != arrayType { + return + } + + item, itemsKeyFound := val[jsonItems] + if itemsKeyFound { + return + } + + res.AddErrors(errors.Required(jsonItems, o.Path, item)) } func (o *objectValidator) checkItemsMustBeTypeArray(res *Result, val map[string]interface{}) { - if !o.isProperties() && !o.isDefault() && !o.isExample() { - if _, itemsKeyFound := val[jsonItems]; itemsKeyFound { - t, typeFound := val[jsonType] - if typeFound { - if tpe, ok := t.(string); !ok || tpe != arrayType { - res.AddErrors(errors.InvalidType(o.Path, o.In, arrayType, nil)) - } - } else { - // there is no type - res.AddErrors(errors.Required(jsonType, o.Path, t)) - } - } + if val == nil { + return + } + + if o.isProperties() || o.isDefault() || o.isExample() { + return + } + + _, itemsKeyFound := val[jsonItems] + if !itemsKeyFound { + return + } + + t, typeFound := val[jsonType] + if !typeFound { + // there is no type + res.AddErrors(errors.Required(jsonType, o.Path, t)) + } + + if tpe, isString := t.(string); !isString || tpe != arrayType { + res.AddErrors(errors.InvalidType(o.Path, o.In, arrayType, nil)) } } @@ -104,176 +158,274 @@ func (o *objectValidator) precheck(res *Result, val map[string]interface{}) { } func (o *objectValidator) Validate(data interface{}) *Result { - val := data.(map[string]interface{}) - // TODO: guard against nil data + if o.Options.recycleValidators { + defer func() { + o.redeem() + }() + } + + var val map[string]interface{} + if data != nil { + var ok bool + val, ok = data.(map[string]interface{}) + if !ok { + return errorHelp.sErr(invalidObjectMsg(o.Path, o.In), o.Options.recycleResult) + } + } numKeys := int64(len(val)) if o.MinProperties != nil && numKeys < *o.MinProperties { - return errorHelp.sErr(errors.TooFewProperties(o.Path, o.In, *o.MinProperties)) + return errorHelp.sErr(errors.TooFewProperties(o.Path, o.In, *o.MinProperties), o.Options.recycleResult) } if o.MaxProperties != nil && numKeys > *o.MaxProperties { - return errorHelp.sErr(errors.TooManyProperties(o.Path, o.In, *o.MaxProperties)) + return errorHelp.sErr(errors.TooManyProperties(o.Path, o.In, *o.MaxProperties), o.Options.recycleResult) } - res := new(Result) + var res *Result + if o.Options.recycleResult { + res = pools.poolOfResults.BorrowResult() + } else { + res = new(Result) + } o.precheck(res, val) // 
check validity of field names if o.AdditionalProperties != nil && !o.AdditionalProperties.Allows { // Case: additionalProperties: false - for k := range val { - _, regularProperty := o.Properties[k] - matched := false - - for pk := range o.PatternProperties { - if matches, _ := regexp.MatchString(pk, k); matches { - matched = true - break - } - } - - if !regularProperty && k != "$schema" && k != "id" && !matched { - // Special properties "$schema" and "id" are ignored - res.AddErrors(errors.PropertyNotAllowed(o.Path, o.In, k)) - - // BUG(fredbi): This section should move to a part dedicated to spec validation as - // it will conflict with regular schemas where a property "headers" is defined. - - // - // Croaks a more explicit message on top of the standard one - // on some recognized cases. - // - // NOTE: edge cases with invalid type assertion are simply ignored here. - // NOTE: prefix your messages here by "IMPORTANT!" so there are not filtered - // by higher level callers (the IMPORTANT! tag will be eventually - // removed). - if k == "headers" && val[k] != nil { - // $ref is forbidden in header - if headers, mapOk := val[k].(map[string]interface{}); mapOk { - for headerKey, headerBody := range headers { - if headerBody != nil { - if headerSchema, mapOfMapOk := headerBody.(map[string]interface{}); mapOfMapOk { - if _, found := headerSchema["$ref"]; found { - var msg string - if refString, stringOk := headerSchema["$ref"].(string); stringOk { - msg = strings.Join([]string{", one may not use $ref=\":", refString, "\""}, "") - } - res.AddErrors(refNotAllowedInHeaderMsg(o.Path, headerKey, msg)) - } - } - } - } - } - /* - case "$ref": - if val[k] != nil { - // TODO: check context of that ref: warn about siblings, check against invalid context - } - */ - } - } - } + o.validateNoAdditionalProperties(val, res) } else { - // Cases: no additionalProperties (implying: true), or additionalProperties: true, or additionalProperties: { <> } - for key, value := range val { - _, regularProperty := o.Properties[key] - - // Validates property against "patternProperties" if applicable - // BUG(fredbi): succeededOnce is always false - - // NOTE: how about regular properties which do not match patternProperties? - matched, succeededOnce, _ := o.validatePatternProperty(key, value, res) - - if !(regularProperty || matched || succeededOnce) { - - // Cases: properties which are not regular properties and have not been matched by the PatternProperties validator - if o.AdditionalProperties != nil && o.AdditionalProperties.Schema != nil { - // AdditionalProperties as Schema - r := NewSchemaValidator(o.AdditionalProperties.Schema, o.Root, o.Path+"."+key, o.KnownFormats, o.Options.Options()...).Validate(value) - res.mergeForField(data.(map[string]interface{}), key, r) - } else if regularProperty && !(matched || succeededOnce) { - // TODO: this is dead code since regularProperty=false here - res.AddErrors(errors.FailedAllPatternProperties(o.Path, o.In, key)) - } - } - } - // Valid cases: additionalProperties: true or undefined + // Cases: empty additionalProperties (implying: true), or additionalProperties: true, or additionalProperties: { <> } + o.validateAdditionalProperties(val, res) } - createdFromDefaults := map[string]bool{} - - // Property types: - // - regular Property - for pName := range o.Properties { - pSchema := o.Properties[pName] // one instance per iteration - rName := pName - if o.Path != "" { - rName = o.Path + "." 
+ pName - } - - // Recursively validates each property against its schema - if v, ok := val[pName]; ok { - r := NewSchemaValidator(&pSchema, o.Root, rName, o.KnownFormats, o.Options.Options()...).Validate(v) - res.mergeForField(data.(map[string]interface{}), pName, r) - } else if pSchema.Default != nil { - // If a default value is defined, creates the property from defaults - // NOTE: JSON schema does not enforce default values to be valid against schema. Swagger does. - createdFromDefaults[pName] = true - res.addPropertySchemata(data.(map[string]interface{}), pName, &pSchema) - } - } - - // Check required properties - if len(o.Required) > 0 { - for _, k := range o.Required { - if v, ok := val[k]; !ok && !createdFromDefaults[k] { - res.AddErrors(errors.Required(o.Path+"."+k, o.In, v)) - continue - } - } - } + o.validatePropertiesSchema(val, res) // Check patternProperties // TODO: it looks like we have done that twice in many cases for key, value := range val { _, regularProperty := o.Properties[key] - matched, _ /*succeededOnce*/, patterns := o.validatePatternProperty(key, value, res) - if !regularProperty && (matched /*|| succeededOnce*/) { - for _, pName := range patterns { - if v, ok := o.PatternProperties[pName]; ok { - r := NewSchemaValidator(&v, o.Root, o.Path+"."+key, o.KnownFormats, o.Options.Options()...).Validate(value) - res.mergeForField(data.(map[string]interface{}), key, r) - } + matched, _, patterns := o.validatePatternProperty(key, value, res) // applies to regular properties as well + if regularProperty || !matched { + continue + } + + for _, pName := range patterns { + if v, ok := o.PatternProperties[pName]; ok { + r := newSchemaValidator(&v, o.Root, o.Path+"."+key, o.KnownFormats, o.Options).Validate(value) + res.mergeForField(data.(map[string]interface{}), key, r) } } } + return res } +func (o *objectValidator) validateNoAdditionalProperties(val map[string]interface{}, res *Result) { + for k := range val { + if k == "$schema" || k == "id" { + // special properties "$schema" and "id" are ignored + continue + } + + _, regularProperty := o.Properties[k] + if regularProperty { + continue + } + + matched := false + for pk := range o.PatternProperties { + re, err := compileRegexp(pk) + if err != nil { + continue + } + if matches := re.MatchString(k); matches { + matched = true + break + } + } + if matched { + continue + } + + res.AddErrors(errors.PropertyNotAllowed(o.Path, o.In, k)) + + // BUG(fredbi): This section should move to a part dedicated to spec validation as + // it will conflict with regular schemas where a property "headers" is defined. + + // + // Croaks a more explicit message on top of the standard one + // on some recognized cases. + // + // NOTE: edge cases with invalid type assertion are simply ignored here. + // NOTE: prefix your messages here by "IMPORTANT!" so they are not filtered + // by higher level callers (the IMPORTANT! tag will be eventually + // removed).
+ if k != "headers" || val[k] == nil { + continue + } + + // $ref is forbidden in header + headers, mapOk := val[k].(map[string]interface{}) + if !mapOk { + continue + } + + for headerKey, headerBody := range headers { + if headerBody == nil { + continue + } + + headerSchema, mapOfMapOk := headerBody.(map[string]interface{}) + if !mapOfMapOk { + continue + } + + _, found := headerSchema["$ref"] + if !found { + continue + } + + refString, stringOk := headerSchema["$ref"].(string) + if !stringOk { + continue + } + + msg := strings.Join([]string{", one may not use $ref=\":", refString, "\""}, "") + res.AddErrors(refNotAllowedInHeaderMsg(o.Path, headerKey, msg)) + /* + case "$ref": + if val[k] != nil { + // TODO: check context of that ref: warn about siblings, check against invalid context + } + */ + } + } +} + +func (o *objectValidator) validateAdditionalProperties(val map[string]interface{}, res *Result) { + for key, value := range val { + _, regularProperty := o.Properties[key] + if regularProperty { + continue + } + + // Validates property against "patternProperties" if applicable + // BUG(fredbi): succeededOnce is always false + + // NOTE: how about regular properties which do not match patternProperties? + matched, succeededOnce, _ := o.validatePatternProperty(key, value, res) + if matched || succeededOnce { + continue + } + + if o.AdditionalProperties == nil || o.AdditionalProperties.Schema == nil { + continue + } + + // Cases: properties which are not regular properties and have not been matched by the PatternProperties validator + // AdditionalProperties as Schema + r := newSchemaValidator(o.AdditionalProperties.Schema, o.Root, o.Path+"."+key, o.KnownFormats, o.Options).Validate(value) + res.mergeForField(val, key, r) + } + // Valid cases: additionalProperties: true or undefined +} + +func (o *objectValidator) validatePropertiesSchema(val map[string]interface{}, res *Result) { + createdFromDefaults := map[string]struct{}{} + + // Property types: + // - regular Property + pSchema := pools.poolOfSchemas.BorrowSchema() // recycle a spec.Schema object which lifespan extends only to the validation of properties + defer func() { + pools.poolOfSchemas.RedeemSchema(pSchema) + }() + + for pName := range o.Properties { + *pSchema = o.Properties[pName] + var rName string + if o.Path == "" { + rName = pName + } else { + rName = o.Path + "." + pName + } + + // Recursively validates each property against its schema + v, ok := val[pName] + if ok { + r := newSchemaValidator(pSchema, o.Root, rName, o.KnownFormats, o.Options).Validate(v) + res.mergeForField(val, pName, r) + + continue + } + + if pSchema.Default != nil { + // if a default value is defined, creates the property from defaults + // NOTE: JSON schema does not enforce default values to be valid against schema. Swagger does. 
+ createdFromDefaults[pName] = struct{}{} + if !o.Options.skipSchemataResult { + res.addPropertySchemata(val, pName, pSchema) // this shallow-clones the content of the pSchema pointer + } + } + } + + if len(o.Required) == 0 { + return + } + + // Check required properties + for _, k := range o.Required { + v, ok := val[k] + if ok { + continue + } + _, isCreatedFromDefaults := createdFromDefaults[k] + if isCreatedFromDefaults { + continue + } + + res.AddErrors(errors.Required(fmt.Sprintf("%s.%s", o.Path, k), o.In, v)) + } +} + // TODO: succeededOnce is not used anywhere func (o *objectValidator) validatePatternProperty(key string, value interface{}, result *Result) (bool, bool, []string) { - matched := false - succeededOnce := false - var patterns []string - - for k, schema := range o.PatternProperties { - sch := schema - if match, _ := regexp.MatchString(k, key); match { - patterns = append(patterns, k) - matched = true - validator := NewSchemaValidator(&sch, o.Root, o.Path+"."+key, o.KnownFormats, o.Options.Options()...) - - res := validator.Validate(value) - result.Merge(res) - } + if len(o.PatternProperties) == 0 { + return false, false, nil } - // BUG(fredbi): can't get to here. Should remove dead code (commented out). + matched := false + succeededOnce := false + patterns := make([]string, 0, len(o.PatternProperties)) - // if succeededOnce { - // result.Inc() - // } + schema := pools.poolOfSchemas.BorrowSchema() + defer func() { + pools.poolOfSchemas.RedeemSchema(schema) + }() + + for k := range o.PatternProperties { + re, err := compileRegexp(k) + if err != nil { + continue + } + + match := re.MatchString(key) + if !match { + continue + } + + *schema = o.PatternProperties[k] + patterns = append(patterns, k) + matched = true + validator := newSchemaValidator(schema, o.Root, fmt.Sprintf("%s.%s", o.Path, key), o.KnownFormats, o.Options) + + res := validator.Validate(value) + result.Merge(res) + } return matched, succeededOnce, patterns } + +func (o *objectValidator) redeem() { + pools.poolOfObjectValidators.RedeemValidator(o) +} diff --git a/vendor/github.com/go-openapi/validate/options.go b/vendor/github.com/go-openapi/validate/options.go index deeec2f2e..cfe9b0660 100644 --- a/vendor/github.com/go-openapi/validate/options.go +++ b/vendor/github.com/go-openapi/validate/options.go @@ -21,10 +21,29 @@ // NOTE: other options might be needed, for example a go-swagger specific mode. type Opts struct { ContinueOnErrors bool // true: continue reporting errors, even if spec is invalid + + // StrictPathParamUniqueness enables a strict validation of paths that include + // path parameters. When true, it will enforce that for each method, the path + // is unique, regardless of path parameters such that GET:/petstore/{id} and + // GET:/petstore/{pet} are considered duplicate paths. + // + // Consider disabling if path parameters can include slashes such as + // GET:/v1/{shelve} and GET:/v1/{book}, where the IDs are "shelve/*" and + // /"shelve/*/book/*" respectively. + StrictPathParamUniqueness bool + SkipSchemataResult bool } var ( - defaultOpts = Opts{ContinueOnErrors: false} // default is to stop validation on errors + defaultOpts = Opts{ + // default is to stop validation on errors + ContinueOnErrors: false, + + // StrictPathParamUniqueness is defaulted to true. This maintains existing + // behavior.
+ StrictPathParamUniqueness: true, + } + defaultOptsMutex = &sync.Mutex{} ) diff --git a/vendor/github.com/go-openapi/validate/pools.go b/vendor/github.com/go-openapi/validate/pools.go new file mode 100644 index 000000000..3ddce4dcc --- /dev/null +++ b/vendor/github.com/go-openapi/validate/pools.go @@ -0,0 +1,366 @@ +//go:build !validatedebug + +package validate + +import ( + "sync" + + "github.com/go-openapi/spec" +) + +var pools allPools + +func init() { + resetPools() +} + +func resetPools() { + // NOTE: for testing purpose, we might want to reset pools after calling Validate twice. + // The pool is corrupted in that case: calling Put twice inserts a duplicate in the pool + // and further calls to Get are mishandled. + + pools = allPools{ + poolOfSchemaValidators: schemaValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &SchemaValidator{} + + return s + }, + }, + }, + poolOfObjectValidators: objectValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &objectValidator{} + + return s + }, + }, + }, + poolOfSliceValidators: sliceValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &schemaSliceValidator{} + + return s + }, + }, + }, + poolOfItemsValidators: itemsValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &itemsValidator{} + + return s + }, + }, + }, + poolOfBasicCommonValidators: basicCommonValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &basicCommonValidator{} + + return s + }, + }, + }, + poolOfHeaderValidators: headerValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &HeaderValidator{} + + return s + }, + }, + }, + poolOfParamValidators: paramValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &ParamValidator{} + + return s + }, + }, + }, + poolOfBasicSliceValidators: basicSliceValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &basicSliceValidator{} + + return s + }, + }, + }, + poolOfNumberValidators: numberValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &numberValidator{} + + return s + }, + }, + }, + poolOfStringValidators: stringValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &stringValidator{} + + return s + }, + }, + }, + poolOfSchemaPropsValidators: schemaPropsValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &schemaPropsValidator{} + + return s + }, + }, + }, + poolOfFormatValidators: formatValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &formatValidator{} + + return s + }, + }, + }, + poolOfTypeValidators: typeValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &typeValidator{} + + return s + }, + }, + }, + poolOfSchemas: schemasPool{ + Pool: &sync.Pool{ + New: func() any { + s := &spec.Schema{} + + return s + }, + }, + }, + poolOfResults: resultsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &Result{} + + return s + }, + }, + }, + } +} + +type ( + allPools struct { + // memory pools for all validator objects. + // + // Each pool can be borrowed from and redeemed to. 
+ poolOfSchemaValidators schemaValidatorsPool + poolOfObjectValidators objectValidatorsPool + poolOfSliceValidators sliceValidatorsPool + poolOfItemsValidators itemsValidatorsPool + poolOfBasicCommonValidators basicCommonValidatorsPool + poolOfHeaderValidators headerValidatorsPool + poolOfParamValidators paramValidatorsPool + poolOfBasicSliceValidators basicSliceValidatorsPool + poolOfNumberValidators numberValidatorsPool + poolOfStringValidators stringValidatorsPool + poolOfSchemaPropsValidators schemaPropsValidatorsPool + poolOfFormatValidators formatValidatorsPool + poolOfTypeValidators typeValidatorsPool + poolOfSchemas schemasPool + poolOfResults resultsPool + } + + schemaValidatorsPool struct { + *sync.Pool + } + + objectValidatorsPool struct { + *sync.Pool + } + + sliceValidatorsPool struct { + *sync.Pool + } + + itemsValidatorsPool struct { + *sync.Pool + } + + basicCommonValidatorsPool struct { + *sync.Pool + } + + headerValidatorsPool struct { + *sync.Pool + } + + paramValidatorsPool struct { + *sync.Pool + } + + basicSliceValidatorsPool struct { + *sync.Pool + } + + numberValidatorsPool struct { + *sync.Pool + } + + stringValidatorsPool struct { + *sync.Pool + } + + schemaPropsValidatorsPool struct { + *sync.Pool + } + + formatValidatorsPool struct { + *sync.Pool + } + + typeValidatorsPool struct { + *sync.Pool + } + + schemasPool struct { + *sync.Pool + } + + resultsPool struct { + *sync.Pool + } +) + +func (p schemaValidatorsPool) BorrowValidator() *SchemaValidator { + return p.Get().(*SchemaValidator) +} + +func (p schemaValidatorsPool) RedeemValidator(s *SchemaValidator) { + // NOTE: s might be nil. In that case, Put is a noop. + p.Put(s) +} + +func (p objectValidatorsPool) BorrowValidator() *objectValidator { + return p.Get().(*objectValidator) +} + +func (p objectValidatorsPool) RedeemValidator(s *objectValidator) { + p.Put(s) +} + +func (p sliceValidatorsPool) BorrowValidator() *schemaSliceValidator { + return p.Get().(*schemaSliceValidator) +} + +func (p sliceValidatorsPool) RedeemValidator(s *schemaSliceValidator) { + p.Put(s) +} + +func (p itemsValidatorsPool) BorrowValidator() *itemsValidator { + return p.Get().(*itemsValidator) +} + +func (p itemsValidatorsPool) RedeemValidator(s *itemsValidator) { + p.Put(s) +} + +func (p basicCommonValidatorsPool) BorrowValidator() *basicCommonValidator { + return p.Get().(*basicCommonValidator) +} + +func (p basicCommonValidatorsPool) RedeemValidator(s *basicCommonValidator) { + p.Put(s) +} + +func (p headerValidatorsPool) BorrowValidator() *HeaderValidator { + return p.Get().(*HeaderValidator) +} + +func (p headerValidatorsPool) RedeemValidator(s *HeaderValidator) { + p.Put(s) +} + +func (p paramValidatorsPool) BorrowValidator() *ParamValidator { + return p.Get().(*ParamValidator) +} + +func (p paramValidatorsPool) RedeemValidator(s *ParamValidator) { + p.Put(s) +} + +func (p basicSliceValidatorsPool) BorrowValidator() *basicSliceValidator { + return p.Get().(*basicSliceValidator) +} + +func (p basicSliceValidatorsPool) RedeemValidator(s *basicSliceValidator) { + p.Put(s) +} + +func (p numberValidatorsPool) BorrowValidator() *numberValidator { + return p.Get().(*numberValidator) +} + +func (p numberValidatorsPool) RedeemValidator(s *numberValidator) { + p.Put(s) +} + +func (p stringValidatorsPool) BorrowValidator() *stringValidator { + return p.Get().(*stringValidator) +} + +func (p stringValidatorsPool) RedeemValidator(s *stringValidator) { + p.Put(s) +} + +func (p schemaPropsValidatorsPool) BorrowValidator() 
*schemaPropsValidator { + return p.Get().(*schemaPropsValidator) +} + +func (p schemaPropsValidatorsPool) RedeemValidator(s *schemaPropsValidator) { + p.Put(s) +} + +func (p formatValidatorsPool) BorrowValidator() *formatValidator { + return p.Get().(*formatValidator) +} + +func (p formatValidatorsPool) RedeemValidator(s *formatValidator) { + p.Put(s) +} + +func (p typeValidatorsPool) BorrowValidator() *typeValidator { + return p.Get().(*typeValidator) +} + +func (p typeValidatorsPool) RedeemValidator(s *typeValidator) { + p.Put(s) +} + +func (p schemasPool) BorrowSchema() *spec.Schema { + return p.Get().(*spec.Schema) +} + +func (p schemasPool) RedeemSchema(s *spec.Schema) { + p.Put(s) +} + +func (p resultsPool) BorrowResult() *Result { + return p.Get().(*Result).cleared() +} + +func (p resultsPool) RedeemResult(s *Result) { + if s == emptyResult { + return + } + p.Put(s) +} diff --git a/vendor/github.com/go-openapi/validate/pools_debug.go b/vendor/github.com/go-openapi/validate/pools_debug.go new file mode 100644 index 000000000..12949f02a --- /dev/null +++ b/vendor/github.com/go-openapi/validate/pools_debug.go @@ -0,0 +1,1012 @@ +//go:build validatedebug + +package validate + +import ( + "fmt" + "runtime" + "sync" + "testing" + + "github.com/go-openapi/spec" +) + +// This version of the pools is to be used for debugging and testing, with build tag "validatedebug". +// +// In this mode, the pools are tracked for allocation and redemption of borrowed objects, so we can +// verify a few behaviors of the validators. The debug pools panic when an invalid usage pattern is detected. + +var pools allPools + +func init() { + resetPools() +} + +func resetPools() { + // NOTE: for testing purpose, we might want to reset pools after calling Validate twice. + // The pool is corrupted in that case: calling Put twice inserts a duplicate in the pool + // and further calls to Get are mishandled. 
+ + pools = allPools{ + poolOfSchemaValidators: schemaValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &SchemaValidator{} + + return s + }, + }, + debugMap: make(map[*SchemaValidator]status), + allocMap: make(map[*SchemaValidator]string), + redeemMap: make(map[*SchemaValidator]string), + }, + poolOfObjectValidators: objectValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &objectValidator{} + + return s + }, + }, + debugMap: make(map[*objectValidator]status), + allocMap: make(map[*objectValidator]string), + redeemMap: make(map[*objectValidator]string), + }, + poolOfSliceValidators: sliceValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &schemaSliceValidator{} + + return s + }, + }, + debugMap: make(map[*schemaSliceValidator]status), + allocMap: make(map[*schemaSliceValidator]string), + redeemMap: make(map[*schemaSliceValidator]string), + }, + poolOfItemsValidators: itemsValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &itemsValidator{} + + return s + }, + }, + debugMap: make(map[*itemsValidator]status), + allocMap: make(map[*itemsValidator]string), + redeemMap: make(map[*itemsValidator]string), + }, + poolOfBasicCommonValidators: basicCommonValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &basicCommonValidator{} + + return s + }, + }, + debugMap: make(map[*basicCommonValidator]status), + allocMap: make(map[*basicCommonValidator]string), + redeemMap: make(map[*basicCommonValidator]string), + }, + poolOfHeaderValidators: headerValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &HeaderValidator{} + + return s + }, + }, + debugMap: make(map[*HeaderValidator]status), + allocMap: make(map[*HeaderValidator]string), + redeemMap: make(map[*HeaderValidator]string), + }, + poolOfParamValidators: paramValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &ParamValidator{} + + return s + }, + }, + debugMap: make(map[*ParamValidator]status), + allocMap: make(map[*ParamValidator]string), + redeemMap: make(map[*ParamValidator]string), + }, + poolOfBasicSliceValidators: basicSliceValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &basicSliceValidator{} + + return s + }, + }, + debugMap: make(map[*basicSliceValidator]status), + allocMap: make(map[*basicSliceValidator]string), + redeemMap: make(map[*basicSliceValidator]string), + }, + poolOfNumberValidators: numberValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &numberValidator{} + + return s + }, + }, + debugMap: make(map[*numberValidator]status), + allocMap: make(map[*numberValidator]string), + redeemMap: make(map[*numberValidator]string), + }, + poolOfStringValidators: stringValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &stringValidator{} + + return s + }, + }, + debugMap: make(map[*stringValidator]status), + allocMap: make(map[*stringValidator]string), + redeemMap: make(map[*stringValidator]string), + }, + poolOfSchemaPropsValidators: schemaPropsValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &schemaPropsValidator{} + + return s + }, + }, + debugMap: make(map[*schemaPropsValidator]status), + allocMap: make(map[*schemaPropsValidator]string), + redeemMap: make(map[*schemaPropsValidator]string), + }, + poolOfFormatValidators: formatValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &formatValidator{} + + return s + }, + }, + debugMap: make(map[*formatValidator]status), + allocMap: make(map[*formatValidator]string), + redeemMap: make(map[*formatValidator]string), + }, + poolOfTypeValidators: 
typeValidatorsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &typeValidator{} + + return s + }, + }, + debugMap: make(map[*typeValidator]status), + allocMap: make(map[*typeValidator]string), + redeemMap: make(map[*typeValidator]string), + }, + poolOfSchemas: schemasPool{ + Pool: &sync.Pool{ + New: func() any { + s := &spec.Schema{} + + return s + }, + }, + debugMap: make(map[*spec.Schema]status), + allocMap: make(map[*spec.Schema]string), + redeemMap: make(map[*spec.Schema]string), + }, + poolOfResults: resultsPool{ + Pool: &sync.Pool{ + New: func() any { + s := &Result{} + + return s + }, + }, + debugMap: make(map[*Result]status), + allocMap: make(map[*Result]string), + redeemMap: make(map[*Result]string), + }, + } +} + +const ( + statusFresh status = iota + 1 + statusRecycled + statusRedeemed +) + +func (s status) String() string { + switch s { + case statusFresh: + return "fresh" + case statusRecycled: + return "recycled" + case statusRedeemed: + return "redeemed" + default: + panic(fmt.Errorf("invalid status: %d", s)) + } +} + +type ( + // Debug + status uint8 + + allPools struct { + // memory pools for all validator objects. + // + // Each pool can be borrowed from and redeemed to. + poolOfSchemaValidators schemaValidatorsPool + poolOfObjectValidators objectValidatorsPool + poolOfSliceValidators sliceValidatorsPool + poolOfItemsValidators itemsValidatorsPool + poolOfBasicCommonValidators basicCommonValidatorsPool + poolOfHeaderValidators headerValidatorsPool + poolOfParamValidators paramValidatorsPool + poolOfBasicSliceValidators basicSliceValidatorsPool + poolOfNumberValidators numberValidatorsPool + poolOfStringValidators stringValidatorsPool + poolOfSchemaPropsValidators schemaPropsValidatorsPool + poolOfFormatValidators formatValidatorsPool + poolOfTypeValidators typeValidatorsPool + poolOfSchemas schemasPool + poolOfResults resultsPool + } + + schemaValidatorsPool struct { + *sync.Pool + debugMap map[*SchemaValidator]status + allocMap map[*SchemaValidator]string + redeemMap map[*SchemaValidator]string + mx sync.Mutex + } + + objectValidatorsPool struct { + *sync.Pool + debugMap map[*objectValidator]status + allocMap map[*objectValidator]string + redeemMap map[*objectValidator]string + mx sync.Mutex + } + + sliceValidatorsPool struct { + *sync.Pool + debugMap map[*schemaSliceValidator]status + allocMap map[*schemaSliceValidator]string + redeemMap map[*schemaSliceValidator]string + mx sync.Mutex + } + + itemsValidatorsPool struct { + *sync.Pool + debugMap map[*itemsValidator]status + allocMap map[*itemsValidator]string + redeemMap map[*itemsValidator]string + mx sync.Mutex + } + + basicCommonValidatorsPool struct { + *sync.Pool + debugMap map[*basicCommonValidator]status + allocMap map[*basicCommonValidator]string + redeemMap map[*basicCommonValidator]string + mx sync.Mutex + } + + headerValidatorsPool struct { + *sync.Pool + debugMap map[*HeaderValidator]status + allocMap map[*HeaderValidator]string + redeemMap map[*HeaderValidator]string + mx sync.Mutex + } + + paramValidatorsPool struct { + *sync.Pool + debugMap map[*ParamValidator]status + allocMap map[*ParamValidator]string + redeemMap map[*ParamValidator]string + mx sync.Mutex + } + + basicSliceValidatorsPool struct { + *sync.Pool + debugMap map[*basicSliceValidator]status + allocMap map[*basicSliceValidator]string + redeemMap map[*basicSliceValidator]string + mx sync.Mutex + } + + numberValidatorsPool struct { + *sync.Pool + debugMap map[*numberValidator]status + allocMap map[*numberValidator]string + redeemMap 
map[*numberValidator]string + mx sync.Mutex + } + + stringValidatorsPool struct { + *sync.Pool + debugMap map[*stringValidator]status + allocMap map[*stringValidator]string + redeemMap map[*stringValidator]string + mx sync.Mutex + } + + schemaPropsValidatorsPool struct { + *sync.Pool + debugMap map[*schemaPropsValidator]status + allocMap map[*schemaPropsValidator]string + redeemMap map[*schemaPropsValidator]string + mx sync.Mutex + } + + formatValidatorsPool struct { + *sync.Pool + debugMap map[*formatValidator]status + allocMap map[*formatValidator]string + redeemMap map[*formatValidator]string + mx sync.Mutex + } + + typeValidatorsPool struct { + *sync.Pool + debugMap map[*typeValidator]status + allocMap map[*typeValidator]string + redeemMap map[*typeValidator]string + mx sync.Mutex + } + + schemasPool struct { + *sync.Pool + debugMap map[*spec.Schema]status + allocMap map[*spec.Schema]string + redeemMap map[*spec.Schema]string + mx sync.Mutex + } + + resultsPool struct { + *sync.Pool + debugMap map[*Result]status + allocMap map[*Result]string + redeemMap map[*Result]string + mx sync.Mutex + } +) + +func (p *schemaValidatorsPool) BorrowValidator() *SchemaValidator { + s := p.Get().(*SchemaValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled schema should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *schemaValidatorsPool) RedeemValidator(s *SchemaValidator) { + // NOTE: s might be nil. In that case, Put is a noop. + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed schema should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed schema should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *objectValidatorsPool) BorrowValidator() *objectValidator { + s := p.Get().(*objectValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled object should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *objectValidatorsPool) RedeemValidator(s *objectValidator) { + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed object should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed object should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *sliceValidatorsPool) BorrowValidator() *schemaSliceValidator { + s := p.Get().(*schemaSliceValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled schemaSliceValidator should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *sliceValidatorsPool) RedeemValidator(s *schemaSliceValidator) { + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed schemaSliceValidator should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed schemaSliceValidator should have been allocated from a fresh or recycled pointer") + } + 
p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *itemsValidatorsPool) BorrowValidator() *itemsValidator { + s := p.Get().(*itemsValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled itemsValidator should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *itemsValidatorsPool) RedeemValidator(s *itemsValidator) { + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed itemsValidator should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed itemsValidator should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *basicCommonValidatorsPool) BorrowValidator() *basicCommonValidator { + s := p.Get().(*basicCommonValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled basicCommonValidator should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *basicCommonValidatorsPool) RedeemValidator(s *basicCommonValidator) { + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed basicCommonValidator should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed basicCommonValidator should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *headerValidatorsPool) BorrowValidator() *HeaderValidator { + s := p.Get().(*HeaderValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled HeaderValidator should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *headerValidatorsPool) RedeemValidator(s *HeaderValidator) { + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed header should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed header should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *paramValidatorsPool) BorrowValidator() *ParamValidator { + s := p.Get().(*ParamValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled param should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *paramValidatorsPool) RedeemValidator(s *ParamValidator) { + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed param should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed param should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *basicSliceValidatorsPool) BorrowValidator() *basicSliceValidator { + s := p.Get().(*basicSliceValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + 
p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled basicSliceValidator should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *basicSliceValidatorsPool) RedeemValidator(s *basicSliceValidator) { + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed basicSliceValidator should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed basicSliceValidator should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *numberValidatorsPool) BorrowValidator() *numberValidator { + s := p.Get().(*numberValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled number should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *numberValidatorsPool) RedeemValidator(s *numberValidator) { + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed number should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed number should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *stringValidatorsPool) BorrowValidator() *stringValidator { + s := p.Get().(*stringValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled string should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *stringValidatorsPool) RedeemValidator(s *stringValidator) { + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed string should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed string should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *schemaPropsValidatorsPool) BorrowValidator() *schemaPropsValidator { + s := p.Get().(*schemaPropsValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled param should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *schemaPropsValidatorsPool) RedeemValidator(s *schemaPropsValidator) { + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed schemaProps should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed schemaProps should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *formatValidatorsPool) BorrowValidator() *formatValidator { + s := p.Get().(*formatValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled format should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *formatValidatorsPool) RedeemValidator(s *formatValidator) { + 
p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed format should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed format should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *typeValidatorsPool) BorrowValidator() *typeValidator { + s := p.Get().(*typeValidator) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled type should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *typeValidatorsPool) RedeemValidator(s *typeValidator) { + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed type should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic(fmt.Errorf("redeemed type should have been allocated from a fresh or recycled pointer. Got status %s, already redeamed at: %s", x, p.redeemMap[s])) + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *schemasPool) BorrowSchema() *spec.Schema { + s := p.Get().(*spec.Schema) + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled spec.Schema should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *schemasPool) RedeemSchema(s *spec.Schema) { + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed spec.Schema should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed spec.Schema should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *resultsPool) BorrowResult() *Result { + s := p.Get().(*Result).cleared() + + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + p.debugMap[s] = statusFresh + } else { + if x != statusRedeemed { + panic("recycled result should have been redeemed") + } + p.debugMap[s] = statusRecycled + } + p.allocMap[s] = caller() + + return s +} + +func (p *resultsPool) RedeemResult(s *Result) { + if s == emptyResult { + if len(s.Errors) > 0 || len(s.Warnings) > 0 { + panic("empty result should not mutate") + } + return + } + p.mx.Lock() + defer p.mx.Unlock() + x, ok := p.debugMap[s] + if !ok { + panic("redeemed Result should have been allocated") + } + if x != statusRecycled && x != statusFresh { + panic("redeemed Result should have been allocated from a fresh or recycled pointer") + } + p.debugMap[s] = statusRedeemed + p.redeemMap[s] = caller() + p.Put(s) +} + +func (p *allPools) allIsRedeemed(t testing.TB) bool { + outcome := true + for k, v := range p.poolOfSchemaValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("schemaValidator should be redeemed. Allocated by: %s", p.poolOfSchemaValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfObjectValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("objectValidator should be redeemed. Allocated by: %s", p.poolOfObjectValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfSliceValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("sliceValidator should be redeemed. 
Allocated by: %s", p.poolOfSliceValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfItemsValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("itemsValidator should be redeemed. Allocated by: %s", p.poolOfItemsValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfBasicCommonValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("basicCommonValidator should be redeemed. Allocated by: %s", p.poolOfBasicCommonValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfHeaderValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("headerValidator should be redeemed. Allocated by: %s", p.poolOfHeaderValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfParamValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("paramValidator should be redeemed. Allocated by: %s", p.poolOfParamValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfBasicSliceValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("basicSliceValidator should be redeemed. Allocated by: %s", p.poolOfBasicSliceValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfNumberValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("numberValidator should be redeemed. Allocated by: %s", p.poolOfNumberValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfStringValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("stringValidator should be redeemed. Allocated by: %s", p.poolOfStringValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfSchemaPropsValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("schemaPropsValidator should be redeemed. Allocated by: %s", p.poolOfSchemaPropsValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfFormatValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("formatValidator should be redeemed. Allocated by: %s", p.poolOfFormatValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfTypeValidators.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("typeValidator should be redeemed. Allocated by: %s", p.poolOfTypeValidators.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfSchemas.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("schemas should be redeemed. Allocated by: %s", p.poolOfSchemas.allocMap[k]) + outcome = false + } + for k, v := range p.poolOfResults.debugMap { + if v == statusRedeemed { + continue + } + t.Logf("result should be redeemed. Allocated by: %s", p.poolOfResults.allocMap[k]) + outcome = false + } + + return outcome +} + +func caller() string { + pc, _, _, _ := runtime.Caller(3) //nolint:dogsled + from, line := runtime.FuncForPC(pc).FileLine(pc) + + return fmt.Sprintf("%s:%d", from, line) +} diff --git a/vendor/github.com/go-openapi/validate/result.go b/vendor/github.com/go-openapi/validate/result.go index 8f5f935e5..c80804a93 100644 --- a/vendor/github.com/go-openapi/validate/result.go +++ b/vendor/github.com/go-openapi/validate/result.go @@ -15,7 +15,7 @@ package validate import ( - "fmt" + stderrors "errors" "reflect" "strings" @@ -23,6 +23,8 @@ "github.com/go-openapi/spec" ) +var emptyResult = &Result{MatchCount: 1} + // Result represents a validation result set, composed of // errors and warnings. 
// @@ -50,8 +52,10 @@ type Result struct { // Schemata for slice items itemSchemata []itemSchemata - cachedFieldSchemta map[FieldKey][]*spec.Schema - cachedItemSchemata map[ItemKey][]*spec.Schema + cachedFieldSchemata map[FieldKey][]*spec.Schema + cachedItemSchemata map[ItemKey][]*spec.Schema + + wantsRedeemOnMerge bool } // FieldKey is a pair of an object and a field, usable as a key for a map. @@ -116,6 +120,9 @@ func (r *Result) Merge(others ...*Result) *Result { } r.mergeWithoutRootSchemata(other) r.rootObjectSchemata.Append(other.rootObjectSchemata) + if other.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(other) + } } return r } @@ -132,10 +139,9 @@ func (r *Result) RootObjectSchemata() []*spec.Schema { } // FieldSchemata returns the schemata which apply to fields in objects. -// nolint: dupl func (r *Result) FieldSchemata() map[FieldKey][]*spec.Schema { - if r.cachedFieldSchemta != nil { - return r.cachedFieldSchemta + if r.cachedFieldSchemata != nil { + return r.cachedFieldSchemata } ret := make(map[FieldKey][]*spec.Schema, len(r.fieldSchemata)) @@ -147,12 +153,12 @@ func (r *Result) FieldSchemata() map[FieldKey][]*spec.Schema { ret[key] = append(ret[key], fs.schemata.multiple...) } } - r.cachedFieldSchemta = ret + r.cachedFieldSchemata = ret + return ret } // ItemSchemata returns the schemata which apply to items in slices. -// nolint: dupl func (r *Result) ItemSchemata() map[ItemKey][]*spec.Schema { if r.cachedItemSchemata != nil { return r.cachedItemSchemata @@ -172,12 +178,13 @@ func (r *Result) ItemSchemata() map[ItemKey][]*spec.Schema { } func (r *Result) resetCaches() { - r.cachedFieldSchemta = nil + r.cachedFieldSchemata = nil r.cachedItemSchemata = nil } // mergeForField merges other into r, assigning other's root schemata to the given Object and field name. -// nolint: unparam +// +//nolint:unparam func (r *Result) mergeForField(obj map[string]interface{}, field string, other *Result) *Result { if other == nil { return r @@ -188,18 +195,23 @@ func (r *Result) mergeForField(obj map[string]interface{}, field string, other * if r.fieldSchemata == nil { r.fieldSchemata = make([]fieldSchemata, len(obj)) } + // clone other schemata, as other is about to be redeemed to the pool r.fieldSchemata = append(r.fieldSchemata, fieldSchemata{ obj: obj, field: field, - schemata: other.rootObjectSchemata, + schemata: other.rootObjectSchemata.Clone(), }) } + if other.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(other) + } return r } // mergeForSlice merges other into r, assigning other's root schemata to the given slice and index. -// nolint: unparam +// +//nolint:unparam func (r *Result) mergeForSlice(slice reflect.Value, i int, other *Result) *Result { if other == nil { return r @@ -210,29 +222,38 @@ func (r *Result) mergeForSlice(slice reflect.Value, i int, other *Result) *Resul if r.itemSchemata == nil { r.itemSchemata = make([]itemSchemata, slice.Len()) } + // clone other schemata, as other is about to be redeemed to the pool r.itemSchemata = append(r.itemSchemata, itemSchemata{ slice: slice, index: i, - schemata: other.rootObjectSchemata, + schemata: other.rootObjectSchemata.Clone(), }) } + if other.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(other) + } + return r } // addRootObjectSchemata adds the given schemata for the root object of the result. -// The slice schemata might be reused. I.e. do not modify it after being added to a result. +// +// Since the slice schemata might be reused, it is shallow-cloned before saving it into the result. 
func (r *Result) addRootObjectSchemata(s *spec.Schema) { - r.rootObjectSchemata.Append(schemata{one: s}) + clone := *s + r.rootObjectSchemata.Append(schemata{one: &clone}) } // addPropertySchemata adds the given schemata for the object and field. -// The slice schemata might be reused. I.e. do not modify it after being added to a result. +// +// Since the slice schemata might be reused, it is shallow-cloned before saving it into the result. func (r *Result) addPropertySchemata(obj map[string]interface{}, fld string, schema *spec.Schema) { if r.fieldSchemata == nil { r.fieldSchemata = make([]fieldSchemata, 0, len(obj)) } - r.fieldSchemata = append(r.fieldSchemata, fieldSchemata{obj: obj, field: fld, schemata: schemata{one: schema}}) + clone := *schema + r.fieldSchemata = append(r.fieldSchemata, fieldSchemata{obj: obj, field: fld, schemata: schemata{one: &clone}}) } /* @@ -255,17 +276,21 @@ func (r *Result) mergeWithoutRootSchemata(other *Result) { if other.fieldSchemata != nil { if r.fieldSchemata == nil { - r.fieldSchemata = other.fieldSchemata - } else { - r.fieldSchemata = append(r.fieldSchemata, other.fieldSchemata...) + r.fieldSchemata = make([]fieldSchemata, 0, len(other.fieldSchemata)) + } + for _, field := range other.fieldSchemata { + field.schemata = field.schemata.Clone() + r.fieldSchemata = append(r.fieldSchemata, field) } } if other.itemSchemata != nil { if r.itemSchemata == nil { - r.itemSchemata = other.itemSchemata - } else { - r.itemSchemata = append(r.itemSchemata, other.itemSchemata...) + r.itemSchemata = make([]itemSchemata, 0, len(other.itemSchemata)) + } + for _, field := range other.itemSchemata { + field.schemata = field.schemata.Clone() + r.itemSchemata = append(r.itemSchemata, field) } } } @@ -280,6 +305,9 @@ func (r *Result) MergeAsErrors(others ...*Result) *Result { r.AddErrors(other.Errors...) r.AddErrors(other.Warnings...) r.MatchCount += other.MatchCount + if other.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(other) + } } } return r @@ -295,6 +323,9 @@ func (r *Result) MergeAsWarnings(others ...*Result) *Result { r.AddWarnings(other.Errors...) r.AddWarnings(other.Warnings...) r.MatchCount += other.MatchCount + if other.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(other) + } } } return r @@ -356,16 +387,21 @@ func (r *Result) keepRelevantErrors() *Result { strippedErrors := []error{} for _, e := range r.Errors { if strings.HasPrefix(e.Error(), "IMPORTANT!") { - strippedErrors = append(strippedErrors, fmt.Errorf(strings.TrimPrefix(e.Error(), "IMPORTANT!"))) + strippedErrors = append(strippedErrors, stderrors.New(strings.TrimPrefix(e.Error(), "IMPORTANT!"))) } } strippedWarnings := []error{} for _, e := range r.Warnings { if strings.HasPrefix(e.Error(), "IMPORTANT!") { - strippedWarnings = append(strippedWarnings, fmt.Errorf(strings.TrimPrefix(e.Error(), "IMPORTANT!"))) + strippedWarnings = append(strippedWarnings, stderrors.New(strings.TrimPrefix(e.Error(), "IMPORTANT!"))) } } - strippedResult := new(Result) + var strippedResult *Result + if r.wantsRedeemOnMerge { + strippedResult = pools.poolOfResults.BorrowResult() + } else { + strippedResult = new(Result) + } strippedResult.Errors = strippedErrors strippedResult.Warnings = strippedWarnings return strippedResult @@ -427,6 +463,27 @@ func (r *Result) AsError() error { return errors.CompositeValidationError(r.Errors...) } +func (r *Result) cleared() *Result { + // clear the Result to be reusable. Keep allocated capacity. 
+ r.Errors = r.Errors[:0] + r.Warnings = r.Warnings[:0] + r.MatchCount = 0 + r.data = nil + r.rootObjectSchemata.one = nil + r.rootObjectSchemata.multiple = r.rootObjectSchemata.multiple[:0] + r.fieldSchemata = r.fieldSchemata[:0] + r.itemSchemata = r.itemSchemata[:0] + for k := range r.cachedFieldSchemata { + delete(r.cachedFieldSchemata, k) + } + for k := range r.cachedItemSchemata { + delete(r.cachedItemSchemata, k) + } + r.wantsRedeemOnMerge = true // mark this result as eligible for redeem when merged into another + + return r +} + // schemata is an arbitrary number of schemata. It does a distinction between zero, // one and many schemata to avoid slice allocations. type schemata struct { @@ -453,7 +510,7 @@ func (s *schemata) Slice() []*spec.Schema { return s.multiple } -// appendSchemata appends the schemata in other to s. It mutated s in-place. +// appendSchemata appends the schemata in other to s. It mutates s in-place. func (s *schemata) Append(other schemata) { if other.one == nil && len(other.multiple) == 0 { return @@ -484,3 +541,23 @@ func (s *schemata) Append(other schemata) { } } } + +func (s schemata) Clone() schemata { + var clone schemata + + if s.one != nil { + clone.one = new(spec.Schema) + *clone.one = *s.one + } + + if len(s.multiple) > 0 { + clone.multiple = make([]*spec.Schema, len(s.multiple)) + for idx := 0; idx < len(s.multiple); idx++ { + sp := new(spec.Schema) + *sp = *s.multiple[idx] + clone.multiple[idx] = sp + } + } + + return clone +} diff --git a/vendor/github.com/go-openapi/validate/schema.go b/vendor/github.com/go-openapi/validate/schema.go index b817eb0ef..db65264fd 100644 --- a/vendor/github.com/go-openapi/validate/schema.go +++ b/vendor/github.com/go-openapi/validate/schema.go @@ -24,32 +24,32 @@ "github.com/go-openapi/swag" ) -var ( - specSchemaType = reflect.TypeOf(&spec.Schema{}) - specParameterType = reflect.TypeOf(&spec.Parameter{}) - specHeaderType = reflect.TypeOf(&spec.Header{}) - // specItemsType = reflect.TypeOf(&spec.Items{}) -) - // SchemaValidator validates data against a JSON schema type SchemaValidator struct { Path string in string Schema *spec.Schema - validators []valueValidator + validators [8]valueValidator Root interface{} KnownFormats strfmt.Registry - Options SchemaValidatorOptions + Options *SchemaValidatorOptions } // AgainstSchema validates the specified data against the provided schema, using a registry of supported formats. // // When no pre-parsed *spec.Schema structure is provided, it uses a JSON schema as default. See example. func AgainstSchema(schema *spec.Schema, data interface{}, formats strfmt.Registry, options ...Option) error { - res := NewSchemaValidator(schema, nil, "", formats, options...).Validate(data) + res := NewSchemaValidator(schema, nil, "", formats, + append(options, WithRecycleValidators(true), withRecycleResults(true))..., + ).Validate(data) + defer func() { + pools.poolOfResults.RedeemResult(res) + }() + if res.HasErrors() { return errors.CompositeValidationError(res.Errors...) } + return nil } @@ -57,6 +57,15 @@ func AgainstSchema(schema *spec.Schema, data interface{}, formats strfmt.Registr // // Panics if the provided schema is invalid. 
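Caller-facing behaviour of AgainstSchema is unchanged by the pooling above: the borrowed Result is redeemed internally and only an error (or nil) escapes. A minimal usage sketch, assuming only standard go-openapi helpers (the data value is illustrative):

package main

import (
	"fmt"

	"github.com/go-openapi/spec"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	schema := spec.StringProperty() // {"type": "string"}

	// 42 is not a string, so a composite validation error is returned.
	if err := validate.AgainstSchema(schema, 42, strfmt.Default); err != nil {
		fmt.Println("validation failed:", err)
	}
}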
func NewSchemaValidator(schema *spec.Schema, rootSchema interface{}, root string, formats strfmt.Registry, options ...Option) *SchemaValidator { + opts := new(SchemaValidatorOptions) + for _, o := range options { + o(opts) + } + + return newSchemaValidator(schema, rootSchema, root, formats, opts) +} + +func newSchemaValidator(schema *spec.Schema, rootSchema interface{}, root string, formats strfmt.Registry, opts *SchemaValidatorOptions) *SchemaValidator { if schema == nil { return nil } @@ -72,17 +81,26 @@ func NewSchemaValidator(schema *spec.Schema, rootSchema interface{}, root string panic(msg) } } - s := SchemaValidator{ - Path: root, - in: "body", - Schema: schema, - Root: rootSchema, - KnownFormats: formats, - Options: SchemaValidatorOptions{}} - for _, o := range options { - o(&s.Options) + + if opts == nil { + opts = new(SchemaValidatorOptions) } - s.validators = []valueValidator{ + + var s *SchemaValidator + if opts.recycleValidators { + s = pools.poolOfSchemaValidators.BorrowValidator() + } else { + s = new(SchemaValidator) + } + + s.Path = root + s.in = "body" + s.Schema = schema + s.Root = rootSchema + s.Options = opts + s.KnownFormats = formats + + s.validators = [8]valueValidator{ s.typeValidator(), s.schemaPropsValidator(), s.stringValidator(), @@ -92,7 +110,8 @@ func NewSchemaValidator(schema *spec.Schema, rootSchema interface{}, root string s.commonValidator(), s.objectValidator(), } - return &s + + return s } // SetPath sets the path for this schema valdiator @@ -101,24 +120,46 @@ func (s *SchemaValidator) SetPath(path string) { } // Applies returns true when this schema validator applies -func (s *SchemaValidator) Applies(source interface{}, kind reflect.Kind) bool { +func (s *SchemaValidator) Applies(source interface{}, _ reflect.Kind) bool { _, ok := source.(*spec.Schema) return ok } // Validate validates the data against the schema func (s *SchemaValidator) Validate(data interface{}) *Result { - result := &Result{data: data} if s == nil { - return result + return emptyResult } - if s.Schema != nil { + + if s.Options.recycleValidators { + defer func() { + s.redeemChildren() + s.redeem() // one-time use validator + }() + } + + var result *Result + if s.Options.recycleResult { + result = pools.poolOfResults.BorrowResult() + result.data = data + } else { + result = &Result{data: data} + } + + if s.Schema != nil && !s.Options.skipSchemataResult { result.addRootObjectSchemata(s.Schema) } if data == nil { + // early exit with minimal validation result.Merge(s.validators[0].Validate(data)) // type validator result.Merge(s.validators[6].Validate(data)) // common validator + + if s.Options.recycleValidators { + s.validators[0] = nil + s.validators[6] = nil + } + return result } @@ -147,6 +188,7 @@ func (s *SchemaValidator) Validate(data interface{}) *Result { if erri != nil { result.AddErrors(invalidTypeConversionMsg(s.Path, erri)) result.Inc() + return result } d = in @@ -155,6 +197,7 @@ func (s *SchemaValidator) Validate(data interface{}) *Result { if errf != nil { result.AddErrors(invalidTypeConversionMsg(s.Path, errf)) result.Inc() + return result } d = nf @@ -164,14 +207,26 @@ func (s *SchemaValidator) Validate(data interface{}) *Result { kind = tpe.Kind() } - for _, v := range s.validators { + for idx, v := range s.validators { if !v.Applies(s.Schema, kind) { - debugLog("%T does not apply for %v", v, kind) + if s.Options.recycleValidators { + // Validate won't be called, so relinquish this validator + if redeemableChildren, ok := v.(interface{ redeemChildren() }); ok { + 
redeemableChildren.redeemChildren() + } + if redeemable, ok := v.(interface{ redeem() }); ok { + redeemable.redeem() + } + s.validators[idx] = nil // prevents further (unsafe) usage + } + continue } - err := v.Validate(d) - result.Merge(err) + result.Merge(v.Validate(d)) + if s.Options.recycleValidators { + s.validators[idx] = nil // prevents further (unsafe) usage + } result.Inc() } result.Inc() @@ -180,81 +235,120 @@ func (s *SchemaValidator) Validate(data interface{}) *Result { } func (s *SchemaValidator) typeValidator() valueValidator { - return &typeValidator{Type: s.Schema.Type, Nullable: s.Schema.Nullable, Format: s.Schema.Format, In: s.in, Path: s.Path} + return newTypeValidator( + s.Path, + s.in, + s.Schema.Type, + s.Schema.Nullable, + s.Schema.Format, + s.Options, + ) } func (s *SchemaValidator) commonValidator() valueValidator { - return &basicCommonValidator{ - Path: s.Path, - In: s.in, - Enum: s.Schema.Enum, - } + return newBasicCommonValidator( + s.Path, + s.in, + s.Schema.Default, + s.Schema.Enum, + s.Options, + ) } func (s *SchemaValidator) sliceValidator() valueValidator { - return &schemaSliceValidator{ - Path: s.Path, - In: s.in, - MaxItems: s.Schema.MaxItems, - MinItems: s.Schema.MinItems, - UniqueItems: s.Schema.UniqueItems, - AdditionalItems: s.Schema.AdditionalItems, - Items: s.Schema.Items, - Root: s.Root, - KnownFormats: s.KnownFormats, - Options: s.Options, - } + return newSliceValidator( + s.Path, + s.in, + s.Schema.MaxItems, + s.Schema.MinItems, + s.Schema.UniqueItems, + s.Schema.AdditionalItems, + s.Schema.Items, + s.Root, + s.KnownFormats, + s.Options, + ) } func (s *SchemaValidator) numberValidator() valueValidator { - return &numberValidator{ - Path: s.Path, - In: s.in, - Default: s.Schema.Default, - MultipleOf: s.Schema.MultipleOf, - Maximum: s.Schema.Maximum, - ExclusiveMaximum: s.Schema.ExclusiveMaximum, - Minimum: s.Schema.Minimum, - ExclusiveMinimum: s.Schema.ExclusiveMinimum, - } + return newNumberValidator( + s.Path, + s.in, + s.Schema.Default, + s.Schema.MultipleOf, + s.Schema.Maximum, + s.Schema.ExclusiveMaximum, + s.Schema.Minimum, + s.Schema.ExclusiveMinimum, + "", + "", + s.Options, + ) } func (s *SchemaValidator) stringValidator() valueValidator { - return &stringValidator{ - Path: s.Path, - In: s.in, - MaxLength: s.Schema.MaxLength, - MinLength: s.Schema.MinLength, - Pattern: s.Schema.Pattern, - } + return newStringValidator( + s.Path, + s.in, + nil, + false, + false, + s.Schema.MaxLength, + s.Schema.MinLength, + s.Schema.Pattern, + s.Options, + ) } func (s *SchemaValidator) formatValidator() valueValidator { - return &formatValidator{ - Path: s.Path, - In: s.in, - Format: s.Schema.Format, - KnownFormats: s.KnownFormats, - } + return newFormatValidator( + s.Path, + s.in, + s.Schema.Format, + s.KnownFormats, + s.Options, + ) } func (s *SchemaValidator) schemaPropsValidator() valueValidator { sch := s.Schema - return newSchemaPropsValidator(s.Path, s.in, sch.AllOf, sch.OneOf, sch.AnyOf, sch.Not, sch.Dependencies, s.Root, s.KnownFormats, s.Options.Options()...) 
+ return newSchemaPropsValidator( + s.Path, s.in, sch.AllOf, sch.OneOf, sch.AnyOf, sch.Not, sch.Dependencies, s.Root, s.KnownFormats, + s.Options, + ) } func (s *SchemaValidator) objectValidator() valueValidator { - return &objectValidator{ - Path: s.Path, - In: s.in, - MaxProperties: s.Schema.MaxProperties, - MinProperties: s.Schema.MinProperties, - Required: s.Schema.Required, - Properties: s.Schema.Properties, - AdditionalProperties: s.Schema.AdditionalProperties, - PatternProperties: s.Schema.PatternProperties, - Root: s.Root, - KnownFormats: s.KnownFormats, - Options: s.Options, + return newObjectValidator( + s.Path, + s.in, + s.Schema.MaxProperties, + s.Schema.MinProperties, + s.Schema.Required, + s.Schema.Properties, + s.Schema.AdditionalProperties, + s.Schema.PatternProperties, + s.Root, + s.KnownFormats, + s.Options, + ) +} + +func (s *SchemaValidator) redeem() { + pools.poolOfSchemaValidators.RedeemValidator(s) +} + +func (s *SchemaValidator) redeemChildren() { + for i, validator := range s.validators { + if validator == nil { + continue + } + if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok { + redeemableChildren.redeemChildren() + } + if redeemable, ok := validator.(interface{ redeem() }); ok { + redeemable.redeem() + } + s.validators[i] = nil // free up allocated children if not in pool } } diff --git a/vendor/github.com/go-openapi/validate/schema_option.go b/vendor/github.com/go-openapi/validate/schema_option.go index 4b4879de8..65eeebeaa 100644 --- a/vendor/github.com/go-openapi/validate/schema_option.go +++ b/vendor/github.com/go-openapi/validate/schema_option.go @@ -18,6 +18,9 @@ type SchemaValidatorOptions struct { EnableObjectArrayTypeCheck bool EnableArrayMustHaveItemsCheck bool + recycleValidators bool + recycleResult bool + skipSchemataResult bool } // Option sets optional rules for schema validation @@ -45,10 +48,36 @@ func SwaggerSchema(enable bool) Option { } } -// Options returns current options +// WithRecycleValidators saves memory allocations and makes validators +// available for a single use of Validate() only. +// +// When a validator is recycled, called MUST not call the Validate() method twice. 
+func WithRecycleValidators(enable bool) Option { + return func(svo *SchemaValidatorOptions) { + svo.recycleValidators = enable + } +} + +func withRecycleResults(enable bool) Option { + return func(svo *SchemaValidatorOptions) { + svo.recycleResult = enable + } +} + +// WithSkipSchemataResult skips the deep audit payload stored in validation Result +func WithSkipSchemataResult(enable bool) Option { + return func(svo *SchemaValidatorOptions) { + svo.skipSchemataResult = enable + } +} + +// Options returns the current set of options func (svo SchemaValidatorOptions) Options() []Option { return []Option{ EnableObjectArrayTypeCheck(svo.EnableObjectArrayTypeCheck), EnableArrayMustHaveItemsCheck(svo.EnableArrayMustHaveItemsCheck), + WithRecycleValidators(svo.recycleValidators), + withRecycleResults(svo.recycleResult), + WithSkipSchemataResult(svo.skipSchemataResult), } } diff --git a/vendor/github.com/go-openapi/validate/schema_props.go b/vendor/github.com/go-openapi/validate/schema_props.go index 9bac3d29f..1ca379244 100644 --- a/vendor/github.com/go-openapi/validate/schema_props.go +++ b/vendor/github.com/go-openapi/validate/schema_props.go @@ -30,211 +30,327 @@ type schemaPropsValidator struct { AnyOf []spec.Schema Not *spec.Schema Dependencies spec.Dependencies - anyOfValidators []SchemaValidator - allOfValidators []SchemaValidator - oneOfValidators []SchemaValidator + anyOfValidators []*SchemaValidator + allOfValidators []*SchemaValidator + oneOfValidators []*SchemaValidator notValidator *SchemaValidator Root interface{} KnownFormats strfmt.Registry - Options SchemaValidatorOptions + Options *SchemaValidatorOptions } func (s *schemaPropsValidator) SetPath(path string) { s.Path = path } -func newSchemaPropsValidator(path string, in string, allOf, oneOf, anyOf []spec.Schema, not *spec.Schema, deps spec.Dependencies, root interface{}, formats strfmt.Registry, options ...Option) *schemaPropsValidator { - anyValidators := make([]SchemaValidator, 0, len(anyOf)) - for _, v := range anyOf { - v := v - anyValidators = append(anyValidators, *NewSchemaValidator(&v, root, path, formats, options...)) +func newSchemaPropsValidator( + path string, in string, allOf, oneOf, anyOf []spec.Schema, not *spec.Schema, deps spec.Dependencies, root interface{}, formats strfmt.Registry, + opts *SchemaValidatorOptions) *schemaPropsValidator { + if opts == nil { + opts = new(SchemaValidatorOptions) } - allValidators := make([]SchemaValidator, 0, len(allOf)) - for _, v := range allOf { - v := v - allValidators = append(allValidators, *NewSchemaValidator(&v, root, path, formats, options...)) + + anyValidators := make([]*SchemaValidator, 0, len(anyOf)) + for i := range anyOf { + anyValidators = append(anyValidators, newSchemaValidator(&anyOf[i], root, path, formats, opts)) } - oneValidators := make([]SchemaValidator, 0, len(oneOf)) - for _, v := range oneOf { - v := v - oneValidators = append(oneValidators, *NewSchemaValidator(&v, root, path, formats, options...)) + allValidators := make([]*SchemaValidator, 0, len(allOf)) + for i := range allOf { + allValidators = append(allValidators, newSchemaValidator(&allOf[i], root, path, formats, opts)) + } + oneValidators := make([]*SchemaValidator, 0, len(oneOf)) + for i := range oneOf { + oneValidators = append(oneValidators, newSchemaValidator(&oneOf[i], root, path, formats, opts)) } var notValidator *SchemaValidator if not != nil { - notValidator = NewSchemaValidator(not, root, path, formats, options...) 
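The exported options above also compose with NewSchemaValidator; a sketch of a one-shot, pooled validator (schema and payload are placeholders, imports as in the previous sketch):

// With WithRecycleValidators(true) the validator and its children go back to
// the internal pools after a single Validate call, so it must not be reused.
v := validate.NewSchemaValidator(schema, nil, "body", strfmt.Default,
	validate.SwaggerSchema(true),
	validate.WithRecycleValidators(true),
	validate.WithSkipSchemataResult(true), // skip the deep schemata audit payload
)
res := v.Validate(payload)
if res.HasErrors() {
	fmt.Println(res.AsError())
}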
+ notValidator = newSchemaValidator(not, root, path, formats, opts) } - schOptions := &SchemaValidatorOptions{} - for _, o := range options { - o(schOptions) - } - return &schemaPropsValidator{ - Path: path, - In: in, - AllOf: allOf, - OneOf: oneOf, - AnyOf: anyOf, - Not: not, - Dependencies: deps, - anyOfValidators: anyValidators, - allOfValidators: allValidators, - oneOfValidators: oneValidators, - notValidator: notValidator, - Root: root, - KnownFormats: formats, - Options: *schOptions, + var s *schemaPropsValidator + if opts.recycleValidators { + s = pools.poolOfSchemaPropsValidators.BorrowValidator() + } else { + s = new(schemaPropsValidator) } + + s.Path = path + s.In = in + s.AllOf = allOf + s.OneOf = oneOf + s.AnyOf = anyOf + s.Not = not + s.Dependencies = deps + s.anyOfValidators = anyValidators + s.allOfValidators = allValidators + s.oneOfValidators = oneValidators + s.notValidator = notValidator + s.Root = root + s.KnownFormats = formats + s.Options = opts + + return s } -func (s *schemaPropsValidator) Applies(source interface{}, kind reflect.Kind) bool { - r := reflect.TypeOf(source) == specSchemaType - debugLog("schema props validator for %q applies %t for %T (kind: %v)\n", s.Path, r, source, kind) - return r +func (s *schemaPropsValidator) Applies(source interface{}, _ reflect.Kind) bool { + _, isSchema := source.(*spec.Schema) + return isSchema } func (s *schemaPropsValidator) Validate(data interface{}) *Result { - mainResult := new(Result) + var mainResult *Result + if s.Options.recycleResult { + mainResult = pools.poolOfResults.BorrowResult() + } else { + mainResult = new(Result) + } // Intermediary error results // IMPORTANT! messages from underlying validators - keepResultAnyOf := new(Result) - keepResultOneOf := new(Result) - keepResultAllOf := new(Result) + var keepResultAnyOf, keepResultOneOf, keepResultAllOf *Result + + if s.Options.recycleValidators { + defer func() { + s.redeemChildren() + s.redeem() + + // results are redeemed when merged + }() + } - // Validates at least one in anyOf schemas - var firstSuccess *Result if len(s.anyOfValidators) > 0 { - var bestFailures *Result - succeededOnce := false - for _, anyOfSchema := range s.anyOfValidators { - result := anyOfSchema.Validate(data) - // We keep inner IMPORTANT! errors no matter what MatchCount tells us - keepResultAnyOf.Merge(result.keepRelevantErrors()) - if result.IsValid() { - bestFailures = nil - succeededOnce = true - if firstSuccess == nil { - firstSuccess = result - } - keepResultAnyOf = new(Result) - break - } - // MatchCount is used to select errors from the schema with most positive checks - if bestFailures == nil || result.MatchCount > bestFailures.MatchCount { - bestFailures = result - } - } - - if !succeededOnce { - mainResult.AddErrors(mustValidateAtLeastOneSchemaMsg(s.Path)) - } - if bestFailures != nil { - mainResult.Merge(bestFailures) - } else if firstSuccess != nil { - mainResult.Merge(firstSuccess) - } + keepResultAnyOf = pools.poolOfResults.BorrowResult() + s.validateAnyOf(data, mainResult, keepResultAnyOf) } - // Validates exactly one in oneOf schemas if len(s.oneOfValidators) > 0 { - var bestFailures *Result - var firstSuccess *Result - validated := 0 - - for _, oneOfSchema := range s.oneOfValidators { - result := oneOfSchema.Validate(data) - // We keep inner IMPORTANT! 
errors no matter what MatchCount tells us - keepResultOneOf.Merge(result.keepRelevantErrors()) - if result.IsValid() { - validated++ - bestFailures = nil - if firstSuccess == nil { - firstSuccess = result - } - keepResultOneOf = new(Result) - continue - } - // MatchCount is used to select errors from the schema with most positive checks - if validated == 0 && (bestFailures == nil || result.MatchCount > bestFailures.MatchCount) { - bestFailures = result - } - } - - if validated != 1 { - var additionalMsg string - if validated == 0 { - additionalMsg = "Found none valid" - } else { - additionalMsg = fmt.Sprintf("Found %d valid alternatives", validated) - } - - mainResult.AddErrors(mustValidateOnlyOneSchemaMsg(s.Path, additionalMsg)) - if bestFailures != nil { - mainResult.Merge(bestFailures) - } - } else if firstSuccess != nil { - mainResult.Merge(firstSuccess) - } + keepResultOneOf = pools.poolOfResults.BorrowResult() + s.validateOneOf(data, mainResult, keepResultOneOf) } - // Validates all of allOf schemas if len(s.allOfValidators) > 0 { - validated := 0 - - for _, allOfSchema := range s.allOfValidators { - result := allOfSchema.Validate(data) - // We keep inner IMPORTANT! errors no matter what MatchCount tells us - keepResultAllOf.Merge(result.keepRelevantErrors()) - // keepResultAllOf.Merge(result) - if result.IsValid() { - validated++ - } - mainResult.Merge(result) - } - - if validated != len(s.allOfValidators) { - additionalMsg := "" - if validated == 0 { - additionalMsg = ". None validated" - } - - mainResult.AddErrors(mustValidateAllSchemasMsg(s.Path, additionalMsg)) - } + keepResultAllOf = pools.poolOfResults.BorrowResult() + s.validateAllOf(data, mainResult, keepResultAllOf) } if s.notValidator != nil { - result := s.notValidator.Validate(data) - // We keep inner IMPORTANT! errors no matter what MatchCount tells us - if result.IsValid() { - mainResult.AddErrors(mustNotValidatechemaMsg(s.Path)) - } + s.validateNot(data, mainResult) } if s.Dependencies != nil && len(s.Dependencies) > 0 && reflect.TypeOf(data).Kind() == reflect.Map { - val := data.(map[string]interface{}) - for key := range val { - if dep, ok := s.Dependencies[key]; ok { - - if dep.Schema != nil { - mainResult.Merge(NewSchemaValidator(dep.Schema, s.Root, s.Path+"."+key, s.KnownFormats, s.Options.Options()...).Validate(data)) - continue - } - - if len(dep.Property) > 0 { - for _, depKey := range dep.Property { - if _, ok := val[depKey]; !ok { - mainResult.AddErrors(hasADependencyMsg(s.Path, depKey)) - } - } - } - } - } + s.validateDependencies(data, mainResult) } mainResult.Inc() + // In the end we retain best failures for schema validation // plus, if any, composite errors which may explain special cases (tagged as IMPORTANT!). return mainResult.Merge(keepResultAllOf, keepResultOneOf, keepResultAnyOf) } + +func (s *schemaPropsValidator) validateAnyOf(data interface{}, mainResult, keepResultAnyOf *Result) { + // Validates at least one in anyOf schemas + var bestFailures *Result + + for i, anyOfSchema := range s.anyOfValidators { + result := anyOfSchema.Validate(data) + if s.Options.recycleValidators { + s.anyOfValidators[i] = nil + } + // We keep inner IMPORTANT! 
errors no matter what MatchCount tells us + keepResultAnyOf.Merge(result.keepRelevantErrors()) // merges (and redeems) a new instance of Result + + if result.IsValid() { + if bestFailures != nil && bestFailures.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(bestFailures) + } + + _ = keepResultAnyOf.cleared() + mainResult.Merge(result) + + return + } + + // MatchCount is used to select errors from the schema with most positive checks + if bestFailures == nil || result.MatchCount > bestFailures.MatchCount { + if bestFailures != nil && bestFailures.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(bestFailures) + } + bestFailures = result + + continue + } + + if result.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(result) // this result is ditched + } + } + + mainResult.AddErrors(mustValidateAtLeastOneSchemaMsg(s.Path)) + mainResult.Merge(bestFailures) +} + +func (s *schemaPropsValidator) validateOneOf(data interface{}, mainResult, keepResultOneOf *Result) { + // Validates exactly one in oneOf schemas + var ( + firstSuccess, bestFailures *Result + validated int + ) + + for i, oneOfSchema := range s.oneOfValidators { + result := oneOfSchema.Validate(data) + if s.Options.recycleValidators { + s.oneOfValidators[i] = nil + } + + // We keep inner IMPORTANT! errors no matter what MatchCount tells us + keepResultOneOf.Merge(result.keepRelevantErrors()) // merges (and redeems) a new instance of Result + + if result.IsValid() { + validated++ + _ = keepResultOneOf.cleared() + + if firstSuccess == nil { + firstSuccess = result + } else if result.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(result) // this result is ditched + } + + continue + } + + // MatchCount is used to select errors from the schema with most positive checks + if validated == 0 && (bestFailures == nil || result.MatchCount > bestFailures.MatchCount) { + if bestFailures != nil && bestFailures.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(bestFailures) + } + bestFailures = result + } else if result.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(result) // this result is ditched + } + } + + switch validated { + case 0: + mainResult.AddErrors(mustValidateOnlyOneSchemaMsg(s.Path, "Found none valid")) + mainResult.Merge(bestFailures) + // firstSucess necessarily nil + case 1: + mainResult.Merge(firstSuccess) + if bestFailures != nil && bestFailures.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(bestFailures) + } + default: + mainResult.AddErrors(mustValidateOnlyOneSchemaMsg(s.Path, fmt.Sprintf("Found %d valid alternatives", validated))) + mainResult.Merge(bestFailures) + if firstSuccess != nil && firstSuccess.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(firstSuccess) + } + } +} + +func (s *schemaPropsValidator) validateAllOf(data interface{}, mainResult, keepResultAllOf *Result) { + // Validates all of allOf schemas + var validated int + + for i, allOfSchema := range s.allOfValidators { + result := allOfSchema.Validate(data) + if s.Options.recycleValidators { + s.allOfValidators[i] = nil + } + // We keep inner IMPORTANT! errors no matter what MatchCount tells us + keepResultAllOf.Merge(result.keepRelevantErrors()) + if result.IsValid() { + validated++ + } + mainResult.Merge(result) + } + + switch validated { + case 0: + mainResult.AddErrors(mustValidateAllSchemasMsg(s.Path, ". 
None validated")) + case len(s.allOfValidators): + default: + mainResult.AddErrors(mustValidateAllSchemasMsg(s.Path, "")) + } +} + +func (s *schemaPropsValidator) validateNot(data interface{}, mainResult *Result) { + result := s.notValidator.Validate(data) + if s.Options.recycleValidators { + s.notValidator = nil + } + // We keep inner IMPORTANT! errors no matter what MatchCount tells us + if result.IsValid() { + mainResult.AddErrors(mustNotValidatechemaMsg(s.Path)) + } + if result.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(result) // this result is ditched + } +} + +func (s *schemaPropsValidator) validateDependencies(data interface{}, mainResult *Result) { + val := data.(map[string]interface{}) + for key := range val { + dep, ok := s.Dependencies[key] + if !ok { + continue + } + + if dep.Schema != nil { + mainResult.Merge( + newSchemaValidator(dep.Schema, s.Root, s.Path+"."+key, s.KnownFormats, s.Options).Validate(data), + ) + continue + } + + if len(dep.Property) > 0 { + for _, depKey := range dep.Property { + if _, ok := val[depKey]; !ok { + mainResult.AddErrors(hasADependencyMsg(s.Path, depKey)) + } + } + } + } +} + +func (s *schemaPropsValidator) redeem() { + pools.poolOfSchemaPropsValidators.RedeemValidator(s) +} + +func (s *schemaPropsValidator) redeemChildren() { + for _, v := range s.anyOfValidators { + if v == nil { + continue + } + v.redeemChildren() + v.redeem() + } + s.anyOfValidators = nil + + for _, v := range s.allOfValidators { + if v == nil { + continue + } + v.redeemChildren() + v.redeem() + } + s.allOfValidators = nil + + for _, v := range s.oneOfValidators { + if v == nil { + continue + } + v.redeemChildren() + v.redeem() + } + s.oneOfValidators = nil + + if s.notValidator != nil { + s.notValidator.redeemChildren() + s.notValidator.redeem() + s.notValidator = nil + } +} diff --git a/vendor/github.com/go-openapi/validate/slice_validator.go b/vendor/github.com/go-openapi/validate/slice_validator.go index aa429f518..13bb02087 100644 --- a/vendor/github.com/go-openapi/validate/slice_validator.go +++ b/vendor/github.com/go-openapi/validate/slice_validator.go @@ -32,7 +32,36 @@ type schemaSliceValidator struct { Items *spec.SchemaOrArray Root interface{} KnownFormats strfmt.Registry - Options SchemaValidatorOptions + Options *SchemaValidatorOptions +} + +func newSliceValidator(path, in string, + maxItems, minItems *int64, uniqueItems bool, + additionalItems *spec.SchemaOrBool, items *spec.SchemaOrArray, + root interface{}, formats strfmt.Registry, opts *SchemaValidatorOptions) *schemaSliceValidator { + if opts == nil { + opts = new(SchemaValidatorOptions) + } + + var v *schemaSliceValidator + if opts.recycleValidators { + v = pools.poolOfSliceValidators.BorrowValidator() + } else { + v = new(schemaSliceValidator) + } + + v.Path = path + v.In = in + v.MaxItems = maxItems + v.MinItems = minItems + v.UniqueItems = uniqueItems + v.AdditionalItems = additionalItems + v.Items = items + v.Root = root + v.KnownFormats = formats + v.Options = opts + + return v } func (s *schemaSliceValidator) SetPath(path string) { @@ -46,7 +75,18 @@ func (s *schemaSliceValidator) Applies(source interface{}, kind reflect.Kind) bo } func (s *schemaSliceValidator) Validate(data interface{}) *Result { - result := new(Result) + if s.Options.recycleValidators { + defer func() { + s.redeem() + }() + } + + var result *Result + if s.Options.recycleResult { + result = pools.poolOfResults.BorrowResult() + } else { + result = new(Result) + } if data == nil { return result } @@ -54,8 +94,8 @@ func 
(s *schemaSliceValidator) Validate(data interface{}) *Result { size := val.Len() if s.Items != nil && s.Items.Schema != nil { - validator := NewSchemaValidator(s.Items.Schema, s.Root, s.Path, s.KnownFormats, s.Options.Options()...) for i := 0; i < size; i++ { + validator := newSchemaValidator(s.Items.Schema, s.Root, s.Path, s.KnownFormats, s.Options) validator.SetPath(fmt.Sprintf("%s.%d", s.Path, i)) value := val.Index(i) result.mergeForSlice(val, i, validator.Validate(value.Interface())) @@ -66,10 +106,11 @@ func (s *schemaSliceValidator) Validate(data interface{}) *Result { if s.Items != nil && len(s.Items.Schemas) > 0 { itemsSize = len(s.Items.Schemas) for i := 0; i < itemsSize; i++ { - validator := NewSchemaValidator(&s.Items.Schemas[i], s.Root, fmt.Sprintf("%s.%d", s.Path, i), s.KnownFormats, s.Options.Options()...) - if val.Len() <= i { + if size <= i { break } + + validator := newSchemaValidator(&s.Items.Schemas[i], s.Root, fmt.Sprintf("%s.%d", s.Path, i), s.KnownFormats, s.Options) result.mergeForSlice(val, i, validator.Validate(val.Index(i).Interface())) } } @@ -79,7 +120,7 @@ func (s *schemaSliceValidator) Validate(data interface{}) *Result { } if s.AdditionalItems.Schema != nil { for i := itemsSize; i < size-itemsSize+1; i++ { - validator := NewSchemaValidator(s.AdditionalItems.Schema, s.Root, fmt.Sprintf("%s.%d", s.Path, i), s.KnownFormats, s.Options.Options()...) + validator := newSchemaValidator(s.AdditionalItems.Schema, s.Root, fmt.Sprintf("%s.%d", s.Path, i), s.KnownFormats, s.Options) result.mergeForSlice(val, i, validator.Validate(val.Index(i).Interface())) } } @@ -103,3 +144,7 @@ func (s *schemaSliceValidator) Validate(data interface{}) *Result { result.Inc() return result } + +func (s *schemaSliceValidator) redeem() { + pools.poolOfSliceValidators.RedeemValidator(s) +} diff --git a/vendor/github.com/go-openapi/validate/spec.go b/vendor/github.com/go-openapi/validate/spec.go index dff01f00b..965452566 100644 --- a/vendor/github.com/go-openapi/validate/spec.go +++ b/vendor/github.com/go-openapi/validate/spec.go @@ -15,6 +15,8 @@ package validate import ( + "bytes" + "encoding/gob" "encoding/json" "fmt" "sort" @@ -26,23 +28,23 @@ "github.com/go-openapi/loads" "github.com/go-openapi/spec" "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" ) // Spec validates an OpenAPI 2.0 specification document. // // Returns an error flattening in a single standard error, all validation messages. // -// - TODO: $ref should not have siblings -// - TODO: make sure documentation reflects all checks and warnings -// - TODO: check on discriminators -// - TODO: explicit message on unsupported keywords (better than "forbidden property"...) -// - TODO: full list of unresolved refs -// - TODO: validate numeric constraints (issue#581): this should be handled like defaults and examples -// - TODO: option to determine if we validate for go-swagger or in a more general context -// - TODO: check on required properties to support anyOf, allOf, oneOf +// - TODO: $ref should not have siblings +// - TODO: make sure documentation reflects all checks and warnings +// - TODO: check on discriminators +// - TODO: explicit message on unsupported keywords (better than "forbidden property"...) 
+// - TODO: full list of unresolved refs +// - TODO: validate numeric constraints (issue#581): this should be handled like defaults and examples +// - TODO: option to determine if we validate for go-swagger or in a more general context +// - TODO: check on required properties to support anyOf, allOf, oneOf // // NOTE: SecurityScopes are maps: no need to check uniqueness -// func Spec(doc *loads.Document, formats strfmt.Registry) error { errs, _ /*warns*/ := NewSpecValidator(doc.Schema(), formats).Validate(doc) if errs.HasErrors() { @@ -53,25 +55,38 @@ func Spec(doc *loads.Document, formats strfmt.Registry) error { // SpecValidator validates a swagger 2.0 spec type SpecValidator struct { - schema *spec.Schema // swagger 2.0 schema - spec *loads.Document - analyzer *analysis.Spec - expanded *loads.Document - KnownFormats strfmt.Registry - Options Opts // validation options + schema *spec.Schema // swagger 2.0 schema + spec *loads.Document + analyzer *analysis.Spec + expanded *loads.Document + KnownFormats strfmt.Registry + Options Opts // validation options + schemaOptions *SchemaValidatorOptions } // NewSpecValidator creates a new swagger spec validator instance func NewSpecValidator(schema *spec.Schema, formats strfmt.Registry) *SpecValidator { + // schema options that apply to all called validators + schemaOptions := new(SchemaValidatorOptions) + for _, o := range []Option{ + SwaggerSchema(true), + WithRecycleValidators(true), + // withRecycleResults(true), + } { + o(schemaOptions) + } + return &SpecValidator{ - schema: schema, - KnownFormats: formats, - Options: defaultOpts, + schema: schema, + KnownFormats: formats, + Options: defaultOpts, + schemaOptions: schemaOptions, } } // Validate validates the swagger spec func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) { + s.schemaOptions.skipSchemataResult = s.Options.SkipSchemataResult var sd *loads.Document errs, warnings := new(Result), new(Result) @@ -85,11 +100,8 @@ func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) { s.spec = sd s.analyzer = analysis.New(sd.Spec()) - // Swagger schema validator - schv := NewSchemaValidator(s.schema, nil, "", s.KnownFormats, SwaggerSchema(true)) - var obj interface{} - // Raw spec unmarshalling errors + var obj interface{} if err := json.Unmarshal(sd.Raw(), &obj); err != nil { // NOTE: under normal conditions, the *load.Document has been already unmarshalled // So this one is just a paranoid check on the behavior of the spec package @@ -103,6 +115,8 @@ func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) { warnings.AddErrors(errs.Warnings...) 
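For reference, the spec-level entry point wired above is typically driven as follows (the file path is a placeholder; loads.Spec and strfmt.Default are the usual go-openapi entry points):

package main

import (
	"log"

	"github.com/go-openapi/loads"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	doc, err := loads.Spec("./swagger.json") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	// Spec flattens all validation messages into a single error (nil when valid).
	if err := validate.Spec(doc, strfmt.Default); err != nil {
		log.Fatal(err)
	}
}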
}() + // Swagger schema validator + schv := newSchemaValidator(s.schema, nil, "", s.KnownFormats, s.schemaOptions) errs.Merge(schv.Validate(obj)) // error - // There may be a point in continuing to try and determine more accurate errors if !s.Options.ContinueOnErrors && errs.HasErrors() { @@ -130,13 +144,13 @@ func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) { } // Values provided as default MUST validate their schema - df := &defaultValidator{SpecValidator: s} + df := &defaultValidator{SpecValidator: s, schemaOptions: s.schemaOptions} errs.Merge(df.Validate()) // Values provided as examples MUST validate their schema // Value provided as examples in a response without schema generate a warning // Known limitations: examples in responses for mime type not application/json are ignored (warning) - ex := &exampleValidator{SpecValidator: s} + ex := &exampleValidator{SpecValidator: s, schemaOptions: s.schemaOptions} errs.Merge(ex.Validate()) errs.Merge(s.validateNonEmptyPathParamNames()) @@ -148,22 +162,27 @@ func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) { } func (s *SpecValidator) validateNonEmptyPathParamNames() *Result { - res := new(Result) + res := pools.poolOfResults.BorrowResult() if s.spec.Spec().Paths == nil { // There is no Paths object: error res.AddErrors(noValidPathMsg()) - } else { - if s.spec.Spec().Paths.Paths == nil { - // Paths may be empty: warning - res.AddWarnings(noValidPathMsg()) - } else { - for k := range s.spec.Spec().Paths.Paths { - if strings.Contains(k, "{}") { - res.AddErrors(emptyPathParameterMsg(k)) - } - } + + return res + } + + if s.spec.Spec().Paths.Paths == nil { + // Paths may be empty: warning + res.AddWarnings(noValidPathMsg()) + + return res + } + + for k := range s.spec.Spec().Paths.Paths { + if strings.Contains(k, "{}") { + res.AddErrors(emptyPathParameterMsg(k)) } } + return res } @@ -177,7 +196,7 @@ func (s *SpecValidator) validateDuplicateOperationIDs() *Result { // fallback on possible incomplete picture because of previous errors analyzer = s.analyzer } - res := new(Result) + res := pools.poolOfResults.BorrowResult() known := make(map[string]int) for _, v := range analyzer.OperationIDs() { if v != "" { @@ -199,7 +218,7 @@ type dupProp struct { func (s *SpecValidator) validateDuplicatePropertyNames() *Result { // definition can't declare a property that's already defined by one of its ancestors - res := new(Result) + res := pools.poolOfResults.BorrowResult() for k, sch := range s.spec.Spec().Definitions { if len(sch.AllOf) == 0 { continue @@ -248,7 +267,7 @@ func (s *SpecValidator) validateSchemaPropertyNames(nm string, sch spec.Schema, schn := nm schc := &sch - res := new(Result) + res := pools.poolOfResults.BorrowResult() for schc.Ref.String() != "" { // gather property names @@ -285,7 +304,7 @@ func (s *SpecValidator) validateSchemaPropertyNames(nm string, sch spec.Schema, } func (s *SpecValidator) validateCircularAncestry(nm string, sch spec.Schema, knowns map[string]struct{}) ([]string, *Result) { - res := new(Result) + res := pools.poolOfResults.BorrowResult() if sch.Ref.String() == "" && len(sch.AllOf) == 0 { // Safeguard. 
We should not be able to actually get there return nil, res @@ -335,7 +354,7 @@ func (s *SpecValidator) validateCircularAncestry(nm string, sch spec.Schema, kno func (s *SpecValidator) validateItems() *Result { // validate parameter, items, schema and response objects for presence of item if type is array - res := new(Result) + res := pools.poolOfResults.BorrowResult() for method, pi := range s.analyzer.Operations() { for path, op := range pi { @@ -394,7 +413,7 @@ func (s *SpecValidator) validateItems() *Result { // Verifies constraints on array type func (s *SpecValidator) validateSchemaItems(schema spec.Schema, prefix, opID string) *Result { - res := new(Result) + res := pools.poolOfResults.BorrowResult() if !schema.Type.Contains(arrayType) { return res } @@ -418,7 +437,7 @@ func (s *SpecValidator) validateSchemaItems(schema spec.Schema, prefix, opID str func (s *SpecValidator) validatePathParamPresence(path string, fromPath, fromOperation []string) *Result { // Each defined operation path parameters must correspond to a named element in the API's path pattern. // (For example, you cannot have a path parameter named id for the following path /pets/{petId} but you must have a path parameter named petId.) - res := new(Result) + res := pools.poolOfResults.BorrowResult() for _, l := range fromPath { var matched bool for _, r := range fromOperation { @@ -456,7 +475,6 @@ func (s *SpecValidator) validateReferenced() *Result { return &res } -// nolint: dupl func (s *SpecValidator) validateReferencedParameters() *Result { // Each referenceable definition should have references. params := s.spec.Spec().Parameters @@ -475,14 +493,13 @@ func (s *SpecValidator) validateReferencedParameters() *Result { if len(expected) == 0 { return nil } - result := new(Result) + result := pools.poolOfResults.BorrowResult() for k := range expected { result.AddWarnings(unusedParamMsg(k)) } return result } -// nolint: dupl func (s *SpecValidator) validateReferencedResponses() *Result { // Each referenceable definition should have references. responses := s.spec.Spec().Responses @@ -501,14 +518,13 @@ func (s *SpecValidator) validateReferencedResponses() *Result { if len(expected) == 0 { return nil } - result := new(Result) + result := pools.poolOfResults.BorrowResult() for k := range expected { result.AddWarnings(unusedResponseMsg(k)) } return result } -// nolint: dupl func (s *SpecValidator) validateReferencedDefinitions() *Result { // Each referenceable definition must have references. 
defs := s.spec.Spec().Definitions @@ -537,7 +553,7 @@ func (s *SpecValidator) validateReferencedDefinitions() *Result { func (s *SpecValidator) validateRequiredDefinitions() *Result { // Each property listed in the required array must be defined in the properties of the model - res := new(Result) + res := pools.poolOfResults.BorrowResult() DEFINITIONS: for d, schema := range s.spec.Spec().Definitions { @@ -556,7 +572,7 @@ func (s *SpecValidator) validateRequiredDefinitions() *Result { func (s *SpecValidator) validateRequiredProperties(path, in string, v *spec.Schema) *Result { // Takes care of recursive property definitions, which may be nested in additionalProperties schemas - res := new(Result) + res := pools.poolOfResults.BorrowResult() propertyMatch := false patternMatch := false additionalPropertiesMatch := false @@ -615,40 +631,42 @@ func (s *SpecValidator) validateRequiredProperties(path, in string, v *spec.Sche func (s *SpecValidator) validateParameters() *Result { // - for each method, path is unique, regardless of path parameters // e.g. GET:/petstore/{id}, GET:/petstore/{pet}, GET:/petstore are - // considered duplicate paths + // considered duplicate paths, if StrictPathParamUniqueness is enabled. // - each parameter should have a unique `name` and `type` combination // - each operation should have only 1 parameter of type body // - there must be at most 1 parameter in body // - parameters with pattern property must specify valid patterns // - $ref in parameters must resolve // - path param must be required - res := new(Result) + res := pools.poolOfResults.BorrowResult() rexGarbledPathSegment := mustCompileRegexp(`.*[{}\s]+.*`) for method, pi := range s.expandedAnalyzer().Operations() { methodPaths := make(map[string]map[string]string) for path, op := range pi { - pathToAdd := pathHelp.stripParametersInPath(path) + if s.Options.StrictPathParamUniqueness { + pathToAdd := pathHelp.stripParametersInPath(path) - // Warn on garbled path afer param stripping - if rexGarbledPathSegment.MatchString(pathToAdd) { - res.AddWarnings(pathStrippedParamGarbledMsg(pathToAdd)) - } + // Warn on garbled path afer param stripping + if rexGarbledPathSegment.MatchString(pathToAdd) { + res.AddWarnings(pathStrippedParamGarbledMsg(pathToAdd)) + } - // Check uniqueness of stripped paths - if _, found := methodPaths[method][pathToAdd]; found { + // Check uniqueness of stripped paths + if _, found := methodPaths[method][pathToAdd]; found { - // Sort names for stable, testable output - if strings.Compare(path, methodPaths[method][pathToAdd]) < 0 { - res.AddErrors(pathOverlapMsg(path, methodPaths[method][pathToAdd])) + // Sort names for stable, testable output + if strings.Compare(path, methodPaths[method][pathToAdd]) < 0 { + res.AddErrors(pathOverlapMsg(path, methodPaths[method][pathToAdd])) + } else { + res.AddErrors(pathOverlapMsg(methodPaths[method][pathToAdd], path)) + } } else { - res.AddErrors(pathOverlapMsg(methodPaths[method][pathToAdd], path)) - } - } else { - if _, found := methodPaths[method]; !found { - methodPaths[method] = map[string]string{} - } - methodPaths[method][pathToAdd] = path // Original non stripped path + if _, found := methodPaths[method]; !found { + methodPaths[method] = map[string]string{} + } + methodPaths[method][pathToAdd] = path // Original non stripped path + } } var bodyParams []string @@ -659,7 +677,23 @@ func (s *SpecValidator) validateParameters() *Result { // TODO: should be done after param expansion res.Merge(s.checkUniqueParams(path, method, op)) + // pick the root 
schema from the swagger specification which describes a parameter + origSchema, ok := s.schema.Definitions["parameter"] + if !ok { + panic("unexpected swagger schema: missing #/definitions/parameter") + } + // clone it once to avoid expanding a global schema (e.g. swagger spec) + paramSchema, err := deepCloneSchema(origSchema) + if err != nil { + panic(fmt.Errorf("can't clone schema: %v", err)) + } + for _, pr := range paramHelp.safeExpandedParamsFor(path, method, op.ID, res, s) { + // An expanded parameter must validate the Parameter schema (an unexpanded $ref always passes high-level schema validation) + schv := newSchemaValidator(¶mSchema, s.schema, fmt.Sprintf("%s.%s.parameters.%s", path, method, pr.Name), s.KnownFormats, s.schemaOptions) + obj := swag.ToDynamicJSON(pr) + res.Merge(schv.Validate(obj)) + // Validate pattern regexp for parameters with a Pattern property if _, err := compileRegexp(pr.Pattern); err != nil { res.AddErrors(invalidPatternInParamMsg(op.ID, pr.Name, pr.Pattern)) @@ -741,7 +775,7 @@ func (s *SpecValidator) validateParameters() *Result { func (s *SpecValidator) validateReferencesValid() *Result { // each reference must point to a valid object - res := new(Result) + res := pools.poolOfResults.BorrowResult() for _, r := range s.analyzer.AllRefs() { if !r.IsValidURI(s.spec.SpecFilePath()) { // Safeguard - spec should always yield a valid URI res.AddErrors(invalidRefMsg(r.String())) @@ -767,7 +801,7 @@ func (s *SpecValidator) checkUniqueParams(path, method string, op *spec.Operatio // However, there are some issues with such a factorization: // - analysis does not seem to fully expand params // - param keys may be altered by x-go-name - res := new(Result) + res := pools.poolOfResults.BorrowResult() pnames := make(map[string]struct{}) if op.Parameters != nil { // Safeguard @@ -802,3 +836,17 @@ func (s *SpecValidator) expandedAnalyzer() *analysis.Spec { } return s.analyzer } + +func deepCloneSchema(src spec.Schema) (spec.Schema, error) { + var b bytes.Buffer + if err := gob.NewEncoder(&b).Encode(src); err != nil { + return spec.Schema{}, err + } + + var dst spec.Schema + if err := gob.NewDecoder(&b).Decode(&dst); err != nil { + return spec.Schema{}, err + } + + return dst, nil +} diff --git a/vendor/github.com/go-openapi/validate/spec_messages.go b/vendor/github.com/go-openapi/validate/spec_messages.go index b3757addd..6d1f0f819 100644 --- a/vendor/github.com/go-openapi/validate/spec_messages.go +++ b/vendor/github.com/go-openapi/validate/spec_messages.go @@ -187,6 +187,8 @@ // UnusedResponseWarning ... 
UnusedResponseWarning = "response %q is not used anywhere" + + InvalidObject = "expected an object in %q.%s" ) // Additional error codes @@ -347,11 +349,15 @@ func invalidParameterDefinitionAsSchemaMsg(path, method, operationID string) err func parameterValidationTypeMismatchMsg(param, path, typ string) errors.Error { return errors.New(errors.CompositeErrorCode, ParamValidationTypeMismatch, param, path, typ) } +func invalidObjectMsg(path, in string) errors.Error { + return errors.New(errors.CompositeErrorCode, InvalidObject, path, in) +} // disabled -// func invalidResponseDefinitionAsSchemaMsg(path, method string) errors.Error { -// return errors.New(errors.CompositeErrorCode, InvalidResponseDefinitionAsSchemaError, path, method) -// } +// +// func invalidResponseDefinitionAsSchemaMsg(path, method string) errors.Error { +// return errors.New(errors.CompositeErrorCode, InvalidResponseDefinitionAsSchemaError, path, method) +// } func someParametersBrokenMsg(path, method, operationID string) errors.Error { return errors.New(errors.CompositeErrorCode, SomeParametersBrokenError, path, method, operationID) } diff --git a/vendor/github.com/go-openapi/validate/type.go b/vendor/github.com/go-openapi/validate/type.go index 876467588..f87abb3d5 100644 --- a/vendor/github.com/go-openapi/validate/type.go +++ b/vendor/github.com/go-openapi/validate/type.go @@ -25,11 +25,34 @@ ) type typeValidator struct { + Path string + In string Type spec.StringOrArray Nullable bool Format string - In string - Path string + Options *SchemaValidatorOptions +} + +func newTypeValidator(path, in string, typ spec.StringOrArray, nullable bool, format string, opts *SchemaValidatorOptions) *typeValidator { + if opts == nil { + opts = new(SchemaValidatorOptions) + } + + var t *typeValidator + if opts.recycleValidators { + t = pools.poolOfTypeValidators.BorrowValidator() + } else { + t = new(typeValidator) + } + + t.Path = path + t.In = in + t.Type = typ + t.Nullable = nullable + t.Format = format + t.Options = opts + + return t } func (t *typeValidator) schemaInfoForType(data interface{}) (string, string) { @@ -90,7 +113,7 @@ func (t *typeValidator) schemaInfoForType(data interface{}) (string, string) { default: val := reflect.ValueOf(data) tpe := val.Type() - switch tpe.Kind() { + switch tpe.Kind() { //nolint:exhaustive case reflect.Bool: return booleanType, "" case reflect.String: @@ -125,23 +148,33 @@ func (t *typeValidator) SetPath(path string) { t.Path = path } -func (t *typeValidator) Applies(source interface{}, kind reflect.Kind) bool { +func (t *typeValidator) Applies(source interface{}, _ reflect.Kind) bool { // typeValidator applies to Schema, Parameter and Header objects - stpe := reflect.TypeOf(source) - r := (len(t.Type) > 0 || t.Format != "") && (stpe == specSchemaType || stpe == specParameterType || stpe == specHeaderType) - debugLog("type validator for %q applies %t for %T (kind: %v)\n", t.Path, r, source, kind) - return r + switch source.(type) { + case *spec.Schema: + case *spec.Parameter: + case *spec.Header: + default: + return false + } + + return (len(t.Type) > 0 || t.Format != "") } func (t *typeValidator) Validate(data interface{}) *Result { - result := new(Result) - result.Inc() + if t.Options.recycleValidators { + defer func() { + t.redeem() + }() + } + if data == nil { // nil or zero value for the passed structure require Type: null if len(t.Type) > 0 && !t.Type.Contains(nullType) && !t.Nullable { // TODO: if a property is not required it also passes this - return 
errorHelp.sErr(errors.InvalidType(t.Path, t.In, strings.Join(t.Type, ","), nullType)) + return errorHelp.sErr(errors.InvalidType(t.Path, t.In, strings.Join(t.Type, ","), nullType), t.Options.recycleResult) } - return result + + return emptyResult } // check if the type matches, should be used in every validator chain as first item @@ -151,8 +184,6 @@ func (t *typeValidator) Validate(data interface{}) *Result { // infer schema type (JSON) and format from passed data type schType, format := t.schemaInfoForType(data) - debugLog("path: %s, schType: %s, format: %s, expType: %s, expFmt: %s, kind: %s", t.Path, schType, format, t.Type, t.Format, val.Kind().String()) - // check numerical types // TODO: check unsigned ints // TODO: check json.Number (see schema.go) @@ -163,15 +194,20 @@ func (t *typeValidator) Validate(data interface{}) *Result { if kind != reflect.String && kind != reflect.Slice && t.Format != "" && !(t.Type.Contains(schType) || format == t.Format || isFloatInt || isIntFloat || isLowerInt || isLowerFloat) { // TODO: test case - return errorHelp.sErr(errors.InvalidType(t.Path, t.In, t.Format, format)) + return errorHelp.sErr(errors.InvalidType(t.Path, t.In, t.Format, format), t.Options.recycleResult) } if !(t.Type.Contains(numberType) || t.Type.Contains(integerType)) && t.Format != "" && (kind == reflect.String || kind == reflect.Slice) { - return result + return emptyResult } if !(t.Type.Contains(schType) || isFloatInt || isIntFloat) { - return errorHelp.sErr(errors.InvalidType(t.Path, t.In, strings.Join(t.Type, ","), schType)) + return errorHelp.sErr(errors.InvalidType(t.Path, t.In, strings.Join(t.Type, ","), schType), t.Options.recycleResult) } - return result + + return emptyResult +} + +func (t *typeValidator) redeem() { + pools.poolOfTypeValidators.RedeemValidator(t) } diff --git a/vendor/github.com/go-openapi/validate/validator.go b/vendor/github.com/go-openapi/validate/validator.go index 38cdb9bb6..c083aecc9 100644 --- a/vendor/github.com/go-openapi/validate/validator.go +++ b/vendor/github.com/go-openapi/validate/validator.go @@ -39,20 +39,31 @@ type itemsValidator struct { root interface{} path string in string - validators []valueValidator + validators [6]valueValidator KnownFormats strfmt.Registry + Options *SchemaValidatorOptions } -func newItemsValidator(path, in string, items *spec.Items, root interface{}, formats strfmt.Registry) *itemsValidator { - iv := &itemsValidator{path: path, in: in, items: items, root: root, KnownFormats: formats} - iv.validators = []valueValidator{ - &typeValidator{ - Type: spec.StringOrArray([]string{items.Type}), - Nullable: items.Nullable, - Format: items.Format, - In: in, - Path: path, - }, +func newItemsValidator(path, in string, items *spec.Items, root interface{}, formats strfmt.Registry, opts *SchemaValidatorOptions) *itemsValidator { + if opts == nil { + opts = new(SchemaValidatorOptions) + } + + var iv *itemsValidator + if opts.recycleValidators { + iv = pools.poolOfItemsValidators.BorrowValidator() + } else { + iv = new(itemsValidator) + } + + iv.path = path + iv.in = in + iv.items = items + iv.root = root + iv.KnownFormats = formats + iv.Options = opts + iv.validators = [6]valueValidator{ + iv.typeValidator(), iv.stringValidator(), iv.formatValidator(), iv.numberValidator(), @@ -63,77 +74,152 @@ func newItemsValidator(path, in string, items *spec.Items, root interface{}, for } func (i *itemsValidator) Validate(index int, data interface{}) *Result { + if i.Options.recycleValidators { + defer func() { + i.redeemChildren() + 
i.redeem() + }() + } + tpe := reflect.TypeOf(data) kind := tpe.Kind() - mainResult := new(Result) + var result *Result + if i.Options.recycleResult { + result = pools.poolOfResults.BorrowResult() + } else { + result = new(Result) + } + path := fmt.Sprintf("%s.%d", i.path, index) - for _, validator := range i.validators { - validator.SetPath(path) - if validator.Applies(i.root, kind) { - result := validator.Validate(data) - mainResult.Merge(result) - mainResult.Inc() - if result != nil && result.HasErrors() { - return mainResult + for idx, validator := range i.validators { + if !validator.Applies(i.root, kind) { + if i.Options.recycleValidators { + // Validate won't be called, so relinquish this validator + if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok { + redeemableChildren.redeemChildren() + } + if redeemable, ok := validator.(interface{ redeem() }); ok { + redeemable.redeem() + } + i.validators[idx] = nil // prevents further (unsafe) usage } + + continue + } + + validator.SetPath(path) + err := validator.Validate(data) + if i.Options.recycleValidators { + i.validators[idx] = nil // prevents further (unsafe) usage + } + if err != nil { + result.Inc() + if err.HasErrors() { + result.Merge(err) + + break + } + + result.Merge(err) } } - return mainResult + + return result +} + +func (i *itemsValidator) typeValidator() valueValidator { + return newTypeValidator( + i.path, + i.in, + spec.StringOrArray([]string{i.items.Type}), + i.items.Nullable, + i.items.Format, + i.Options, + ) } func (i *itemsValidator) commonValidator() valueValidator { - return &basicCommonValidator{ - In: i.in, - Default: i.items.Default, - Enum: i.items.Enum, - } + return newBasicCommonValidator( + "", + i.in, + i.items.Default, + i.items.Enum, + i.Options, + ) } func (i *itemsValidator) sliceValidator() valueValidator { - return &basicSliceValidator{ - In: i.in, - Default: i.items.Default, - MaxItems: i.items.MaxItems, - MinItems: i.items.MinItems, - UniqueItems: i.items.UniqueItems, - Source: i.root, - Items: i.items.Items, - KnownFormats: i.KnownFormats, - } + return newBasicSliceValidator( + "", + i.in, + i.items.Default, + i.items.MaxItems, + i.items.MinItems, + i.items.UniqueItems, + i.items.Items, + i.root, + i.KnownFormats, + i.Options, + ) } func (i *itemsValidator) numberValidator() valueValidator { - return &numberValidator{ - In: i.in, - Default: i.items.Default, - MultipleOf: i.items.MultipleOf, - Maximum: i.items.Maximum, - ExclusiveMaximum: i.items.ExclusiveMaximum, - Minimum: i.items.Minimum, - ExclusiveMinimum: i.items.ExclusiveMinimum, - Type: i.items.Type, - Format: i.items.Format, - } + return newNumberValidator( + "", + i.in, + i.items.Default, + i.items.MultipleOf, + i.items.Maximum, + i.items.ExclusiveMaximum, + i.items.Minimum, + i.items.ExclusiveMinimum, + i.items.Type, + i.items.Format, + i.Options, + ) } func (i *itemsValidator) stringValidator() valueValidator { - return &stringValidator{ - In: i.in, - Default: i.items.Default, - MaxLength: i.items.MaxLength, - MinLength: i.items.MinLength, - Pattern: i.items.Pattern, - AllowEmptyValue: false, - } + return newStringValidator( + "", + i.in, + i.items.Default, + false, // Required + false, // AllowEmpty + i.items.MaxLength, + i.items.MinLength, + i.items.Pattern, + i.Options, + ) } func (i *itemsValidator) formatValidator() valueValidator { - return &formatValidator{ - In: i.in, - //Default: i.items.Default, - Format: i.items.Format, - KnownFormats: i.KnownFormats, + return newFormatValidator( + "", + i.in, + 
i.items.Format, + i.KnownFormats, + i.Options, + ) +} + +func (i *itemsValidator) redeem() { + pools.poolOfItemsValidators.RedeemValidator(i) +} + +func (i *itemsValidator) redeemChildren() { + for idx, validator := range i.validators { + if validator == nil { + continue + } + if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok { + redeemableChildren.redeemChildren() + } + if redeemable, ok := validator.(interface{ redeem() }); ok { + redeemable.redeem() + } + i.validators[idx] = nil // free up allocated children if not in pool } } @@ -142,265 +228,501 @@ type basicCommonValidator struct { In string Default interface{} Enum []interface{} + Options *SchemaValidatorOptions +} + +func newBasicCommonValidator(path, in string, def interface{}, enum []interface{}, opts *SchemaValidatorOptions) *basicCommonValidator { + if opts == nil { + opts = new(SchemaValidatorOptions) + } + + var b *basicCommonValidator + if opts.recycleValidators { + b = pools.poolOfBasicCommonValidators.BorrowValidator() + } else { + b = new(basicCommonValidator) + } + + b.Path = path + b.In = in + b.Default = def + b.Enum = enum + b.Options = opts + + return b } func (b *basicCommonValidator) SetPath(path string) { b.Path = path } -func (b *basicCommonValidator) Applies(source interface{}, kind reflect.Kind) bool { +func (b *basicCommonValidator) Applies(source interface{}, _ reflect.Kind) bool { switch source.(type) { case *spec.Parameter, *spec.Schema, *spec.Header: return true + default: + return false } - return false } func (b *basicCommonValidator) Validate(data interface{}) (res *Result) { - if len(b.Enum) > 0 { - for _, enumValue := range b.Enum { - actualType := reflect.TypeOf(enumValue) - if actualType != nil { // Safeguard - expectedValue := reflect.ValueOf(data) - if expectedValue.IsValid() && expectedValue.Type().ConvertibleTo(actualType) { - if reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), enumValue) { - return nil - } - } - } - } - return errorHelp.sErr(errors.EnumFail(b.Path, b.In, data, b.Enum)) + if b.Options.recycleValidators { + defer func() { + b.redeem() + }() } - return nil + + if len(b.Enum) == 0 { + return nil + } + + for _, enumValue := range b.Enum { + actualType := reflect.TypeOf(enumValue) + if actualType == nil { // Safeguard + continue + } + + expectedValue := reflect.ValueOf(data) + if expectedValue.IsValid() && + expectedValue.Type().ConvertibleTo(actualType) && + reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), enumValue) { + return nil + } + } + + return errorHelp.sErr(errors.EnumFail(b.Path, b.In, data, b.Enum), b.Options.recycleResult) +} + +func (b *basicCommonValidator) redeem() { + pools.poolOfBasicCommonValidators.RedeemValidator(b) } // A HeaderValidator has very limited subset of validations to apply type HeaderValidator struct { name string header *spec.Header - validators []valueValidator + validators [6]valueValidator KnownFormats strfmt.Registry + Options *SchemaValidatorOptions } // NewHeaderValidator creates a new header validator object -func NewHeaderValidator(name string, header *spec.Header, formats strfmt.Registry) *HeaderValidator { - p := &HeaderValidator{name: name, header: header, KnownFormats: formats} - p.validators = []valueValidator{ - &typeValidator{ - Type: spec.StringOrArray([]string{header.Type}), - Nullable: header.Nullable, - Format: header.Format, - In: "header", - Path: name, - }, +func NewHeaderValidator(name string, header *spec.Header, formats strfmt.Registry, options ...Option) 
*HeaderValidator { + opts := new(SchemaValidatorOptions) + for _, o := range options { + o(opts) + } + + return newHeaderValidator(name, header, formats, opts) +} + +func newHeaderValidator(name string, header *spec.Header, formats strfmt.Registry, opts *SchemaValidatorOptions) *HeaderValidator { + if opts == nil { + opts = new(SchemaValidatorOptions) + } + + var p *HeaderValidator + if opts.recycleValidators { + p = pools.poolOfHeaderValidators.BorrowValidator() + } else { + p = new(HeaderValidator) + } + + p.name = name + p.header = header + p.KnownFormats = formats + p.Options = opts + p.validators = [6]valueValidator{ + newTypeValidator( + name, + "header", + spec.StringOrArray([]string{header.Type}), + header.Nullable, + header.Format, + p.Options, + ), p.stringValidator(), p.formatValidator(), p.numberValidator(), p.sliceValidator(), p.commonValidator(), } + return p } // Validate the value of the header against its schema func (p *HeaderValidator) Validate(data interface{}) *Result { - result := new(Result) + if p.Options.recycleValidators { + defer func() { + p.redeemChildren() + p.redeem() + }() + } + + if data == nil { + return nil + } + + var result *Result + if p.Options.recycleResult { + result = pools.poolOfResults.BorrowResult() + } else { + result = new(Result) + } + tpe := reflect.TypeOf(data) kind := tpe.Kind() - for _, validator := range p.validators { - if validator.Applies(p.header, kind) { - if err := validator.Validate(data); err != nil { - result.Merge(err) - if err.HasErrors() { - return result + for idx, validator := range p.validators { + if !validator.Applies(p.header, kind) { + if p.Options.recycleValidators { + // Validate won't be called, so relinquish this validator + if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok { + redeemableChildren.redeemChildren() } + if redeemable, ok := validator.(interface{ redeem() }); ok { + redeemable.redeem() + } + p.validators[idx] = nil // prevents further (unsafe) usage } + + continue + } + + err := validator.Validate(data) + if p.Options.recycleValidators { + p.validators[idx] = nil // prevents further (unsafe) usage + } + if err != nil { + if err.HasErrors() { + result.Merge(err) + break + } + result.Merge(err) } } - return nil + + return result } func (p *HeaderValidator) commonValidator() valueValidator { - return &basicCommonValidator{ - Path: p.name, - In: "response", - Default: p.header.Default, - Enum: p.header.Enum, - } + return newBasicCommonValidator( + p.name, + "response", + p.header.Default, + p.header.Enum, + p.Options, + ) } func (p *HeaderValidator) sliceValidator() valueValidator { - return &basicSliceValidator{ - Path: p.name, - In: "response", - Default: p.header.Default, - MaxItems: p.header.MaxItems, - MinItems: p.header.MinItems, - UniqueItems: p.header.UniqueItems, - Items: p.header.Items, - Source: p.header, - KnownFormats: p.KnownFormats, - } + return newBasicSliceValidator( + p.name, + "response", + p.header.Default, + p.header.MaxItems, + p.header.MinItems, + p.header.UniqueItems, + p.header.Items, + p.header, + p.KnownFormats, + p.Options, + ) } func (p *HeaderValidator) numberValidator() valueValidator { - return &numberValidator{ - Path: p.name, - In: "response", - Default: p.header.Default, - MultipleOf: p.header.MultipleOf, - Maximum: p.header.Maximum, - ExclusiveMaximum: p.header.ExclusiveMaximum, - Minimum: p.header.Minimum, - ExclusiveMinimum: p.header.ExclusiveMinimum, - Type: p.header.Type, - Format: p.header.Format, - } + return newNumberValidator( + p.name, + 
"response", + p.header.Default, + p.header.MultipleOf, + p.header.Maximum, + p.header.ExclusiveMaximum, + p.header.Minimum, + p.header.ExclusiveMinimum, + p.header.Type, + p.header.Format, + p.Options, + ) } func (p *HeaderValidator) stringValidator() valueValidator { - return &stringValidator{ - Path: p.name, - In: "response", - Default: p.header.Default, - Required: true, - MaxLength: p.header.MaxLength, - MinLength: p.header.MinLength, - Pattern: p.header.Pattern, - AllowEmptyValue: false, - } + return newStringValidator( + p.name, + "response", + p.header.Default, + true, + false, + p.header.MaxLength, + p.header.MinLength, + p.header.Pattern, + p.Options, + ) } func (p *HeaderValidator) formatValidator() valueValidator { - return &formatValidator{ - Path: p.name, - In: "response", - //Default: p.header.Default, - Format: p.header.Format, - KnownFormats: p.KnownFormats, + return newFormatValidator( + p.name, + "response", + p.header.Format, + p.KnownFormats, + p.Options, + ) +} + +func (p *HeaderValidator) redeem() { + pools.poolOfHeaderValidators.RedeemValidator(p) +} + +func (p *HeaderValidator) redeemChildren() { + for idx, validator := range p.validators { + if validator == nil { + continue + } + if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok { + redeemableChildren.redeemChildren() + } + if redeemable, ok := validator.(interface{ redeem() }); ok { + redeemable.redeem() + } + p.validators[idx] = nil // free up allocated children if not in pool } } // A ParamValidator has very limited subset of validations to apply type ParamValidator struct { param *spec.Parameter - validators []valueValidator + validators [6]valueValidator KnownFormats strfmt.Registry + Options *SchemaValidatorOptions } // NewParamValidator creates a new param validator object -func NewParamValidator(param *spec.Parameter, formats strfmt.Registry) *ParamValidator { - p := &ParamValidator{param: param, KnownFormats: formats} - p.validators = []valueValidator{ - &typeValidator{ - Type: spec.StringOrArray([]string{param.Type}), - Nullable: param.Nullable, - Format: param.Format, - In: param.In, - Path: param.Name, - }, +func NewParamValidator(param *spec.Parameter, formats strfmt.Registry, options ...Option) *ParamValidator { + opts := new(SchemaValidatorOptions) + for _, o := range options { + o(opts) + } + + return newParamValidator(param, formats, opts) +} + +func newParamValidator(param *spec.Parameter, formats strfmt.Registry, opts *SchemaValidatorOptions) *ParamValidator { + if opts == nil { + opts = new(SchemaValidatorOptions) + } + + var p *ParamValidator + if opts.recycleValidators { + p = pools.poolOfParamValidators.BorrowValidator() + } else { + p = new(ParamValidator) + } + + p.param = param + p.KnownFormats = formats + p.Options = opts + p.validators = [6]valueValidator{ + newTypeValidator( + param.Name, + param.In, + spec.StringOrArray([]string{param.Type}), + param.Nullable, + param.Format, + p.Options, + ), p.stringValidator(), p.formatValidator(), p.numberValidator(), p.sliceValidator(), p.commonValidator(), } + return p } // Validate the data against the description of the parameter func (p *ParamValidator) Validate(data interface{}) *Result { - result := new(Result) + if data == nil { + return nil + } + + var result *Result + if p.Options.recycleResult { + result = pools.poolOfResults.BorrowResult() + } else { + result = new(Result) + } + tpe := reflect.TypeOf(data) kind := tpe.Kind() + if p.Options.recycleValidators { + defer func() { + p.redeemChildren() + p.redeem() + 
}() + } + // TODO: validate type - for _, validator := range p.validators { - if validator.Applies(p.param, kind) { - if err := validator.Validate(data); err != nil { - result.Merge(err) - if err.HasErrors() { - return result + for idx, validator := range p.validators { + if !validator.Applies(p.param, kind) { + if p.Options.recycleValidators { + // Validate won't be called, so relinquish this validator + if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok { + redeemableChildren.redeemChildren() } + if redeemable, ok := validator.(interface{ redeem() }); ok { + redeemable.redeem() + } + p.validators[idx] = nil // prevents further (unsafe) usage } + + continue + } + + err := validator.Validate(data) + if p.Options.recycleValidators { + p.validators[idx] = nil // prevents further (unsafe) usage + } + if err != nil { + if err.HasErrors() { + result.Merge(err) + break + } + result.Merge(err) } } - return nil + + return result } func (p *ParamValidator) commonValidator() valueValidator { - return &basicCommonValidator{ - Path: p.param.Name, - In: p.param.In, - Default: p.param.Default, - Enum: p.param.Enum, - } + return newBasicCommonValidator( + p.param.Name, + p.param.In, + p.param.Default, + p.param.Enum, + p.Options, + ) } func (p *ParamValidator) sliceValidator() valueValidator { - return &basicSliceValidator{ - Path: p.param.Name, - In: p.param.In, - Default: p.param.Default, - MaxItems: p.param.MaxItems, - MinItems: p.param.MinItems, - UniqueItems: p.param.UniqueItems, - Items: p.param.Items, - Source: p.param, - KnownFormats: p.KnownFormats, - } + return newBasicSliceValidator( + p.param.Name, + p.param.In, + p.param.Default, + p.param.MaxItems, + p.param.MinItems, + p.param.UniqueItems, + p.param.Items, + p.param, + p.KnownFormats, + p.Options, + ) } func (p *ParamValidator) numberValidator() valueValidator { - return &numberValidator{ - Path: p.param.Name, - In: p.param.In, - Default: p.param.Default, - MultipleOf: p.param.MultipleOf, - Maximum: p.param.Maximum, - ExclusiveMaximum: p.param.ExclusiveMaximum, - Minimum: p.param.Minimum, - ExclusiveMinimum: p.param.ExclusiveMinimum, - Type: p.param.Type, - Format: p.param.Format, - } + return newNumberValidator( + p.param.Name, + p.param.In, + p.param.Default, + p.param.MultipleOf, + p.param.Maximum, + p.param.ExclusiveMaximum, + p.param.Minimum, + p.param.ExclusiveMinimum, + p.param.Type, + p.param.Format, + p.Options, + ) } func (p *ParamValidator) stringValidator() valueValidator { - return &stringValidator{ - Path: p.param.Name, - In: p.param.In, - Default: p.param.Default, - AllowEmptyValue: p.param.AllowEmptyValue, - Required: p.param.Required, - MaxLength: p.param.MaxLength, - MinLength: p.param.MinLength, - Pattern: p.param.Pattern, - } + return newStringValidator( + p.param.Name, + p.param.In, + p.param.Default, + p.param.Required, + p.param.AllowEmptyValue, + p.param.MaxLength, + p.param.MinLength, + p.param.Pattern, + p.Options, + ) } func (p *ParamValidator) formatValidator() valueValidator { - return &formatValidator{ - Path: p.param.Name, - In: p.param.In, - //Default: p.param.Default, - Format: p.param.Format, - KnownFormats: p.KnownFormats, + return newFormatValidator( + p.param.Name, + p.param.In, + p.param.Format, + p.KnownFormats, + p.Options, + ) +} + +func (p *ParamValidator) redeem() { + pools.poolOfParamValidators.RedeemValidator(p) +} + +func (p *ParamValidator) redeemChildren() { + for idx, validator := range p.validators { + if validator == nil { + continue + } + if redeemableChildren, ok 
:= validator.(interface{ redeemChildren() }); ok { + redeemableChildren.redeemChildren() + } + if redeemable, ok := validator.(interface{ redeem() }); ok { + redeemable.redeem() + } + p.validators[idx] = nil // free up allocated children if not in pool } } type basicSliceValidator struct { - Path string - In string - Default interface{} - MaxItems *int64 - MinItems *int64 - UniqueItems bool - Items *spec.Items - Source interface{} - itemsValidator *itemsValidator - KnownFormats strfmt.Registry + Path string + In string + Default interface{} + MaxItems *int64 + MinItems *int64 + UniqueItems bool + Items *spec.Items + Source interface{} + KnownFormats strfmt.Registry + Options *SchemaValidatorOptions +} + +func newBasicSliceValidator( + path, in string, + def interface{}, maxItems, minItems *int64, uniqueItems bool, items *spec.Items, + source interface{}, formats strfmt.Registry, + opts *SchemaValidatorOptions) *basicSliceValidator { + if opts == nil { + opts = new(SchemaValidatorOptions) + } + + var s *basicSliceValidator + if opts.recycleValidators { + s = pools.poolOfBasicSliceValidators.BorrowValidator() + } else { + s = new(basicSliceValidator) + } + + s.Path = path + s.In = in + s.Default = def + s.MaxItems = maxItems + s.MinItems = minItems + s.UniqueItems = uniqueItems + s.Items = items + s.Source = source + s.KnownFormats = formats + s.Options = opts + + return s } func (s *basicSliceValidator) SetPath(path string) { @@ -411,60 +733,61 @@ func (s *basicSliceValidator) Applies(source interface{}, kind reflect.Kind) boo switch source.(type) { case *spec.Parameter, *spec.Items, *spec.Header: return kind == reflect.Slice + default: + return false } - return false } func (s *basicSliceValidator) Validate(data interface{}) *Result { + if s.Options.recycleValidators { + defer func() { + s.redeem() + }() + } val := reflect.ValueOf(data) size := int64(val.Len()) if s.MinItems != nil { if err := MinItems(s.Path, s.In, size, *s.MinItems); err != nil { - return errorHelp.sErr(err) + return errorHelp.sErr(err, s.Options.recycleResult) } } if s.MaxItems != nil { if err := MaxItems(s.Path, s.In, size, *s.MaxItems); err != nil { - return errorHelp.sErr(err) + return errorHelp.sErr(err, s.Options.recycleResult) } } if s.UniqueItems { if err := UniqueItems(s.Path, s.In, data); err != nil { - return errorHelp.sErr(err) + return errorHelp.sErr(err, s.Options.recycleResult) } } - if s.itemsValidator == nil && s.Items != nil { - s.itemsValidator = newItemsValidator(s.Path, s.In, s.Items, s.Source, s.KnownFormats) + if s.Items == nil { + return nil } - if s.itemsValidator != nil { - for i := 0; i < int(size); i++ { - ele := val.Index(i) - if err := s.itemsValidator.Validate(i, ele.Interface()); err != nil && err.HasErrors() { + for i := 0; i < int(size); i++ { + itemsValidator := newItemsValidator(s.Path, s.In, s.Items, s.Source, s.KnownFormats, s.Options) + ele := val.Index(i) + if err := itemsValidator.Validate(i, ele.Interface()); err != nil { + if err.HasErrors() { return err } + if err.wantsRedeemOnMerge { + pools.poolOfResults.RedeemResult(err) + } } } + return nil } -/* unused -func (s *basicSliceValidator) hasDuplicates(value reflect.Value, size int) bool { - dict := make(map[interface{}]struct{}) - for i := 0; i < size; i++ { - ele := value.Index(i) - if _, ok := dict[ele.Interface()]; ok { - return true - } - dict[ele.Interface()] = struct{}{} - } - return false +func (s *basicSliceValidator) redeem() { + pools.poolOfBasicSliceValidators.RedeemValidator(s) } -*/ type numberValidator struct { 
Path string @@ -476,8 +799,40 @@ type numberValidator struct { Minimum *float64 ExclusiveMinimum bool // Allows for more accurate behavior regarding integers - Type string - Format string + Type string + Format string + Options *SchemaValidatorOptions +} + +func newNumberValidator( + path, in string, def interface{}, + multipleOf, maximum *float64, exclusiveMaximum bool, minimum *float64, exclusiveMinimum bool, + typ, format string, + opts *SchemaValidatorOptions) *numberValidator { + if opts == nil { + opts = new(SchemaValidatorOptions) + } + + var n *numberValidator + if opts.recycleValidators { + n = pools.poolOfNumberValidators.BorrowValidator() + } else { + n = new(numberValidator) + } + + n.Path = path + n.In = in + n.Default = def + n.MultipleOf = multipleOf + n.Maximum = maximum + n.ExclusiveMaximum = exclusiveMaximum + n.Minimum = minimum + n.ExclusiveMinimum = exclusiveMinimum + n.Type = typ + n.Format = format + n.Options = opts + + return n } func (n *numberValidator) SetPath(path string) { @@ -489,12 +844,10 @@ func (n *numberValidator) Applies(source interface{}, kind reflect.Kind) bool { case *spec.Parameter, *spec.Schema, *spec.Items, *spec.Header: isInt := kind >= reflect.Int && kind <= reflect.Uint64 isFloat := kind == reflect.Float32 || kind == reflect.Float64 - r := isInt || isFloat - debugLog("schema props validator for %q applies %t for %T (kind: %v) isInt=%t, isFloat=%t\n", n.Path, r, source, kind, isInt, isFloat) - return r + return isInt || isFloat + default: + return false } - debugLog("schema props validator for %q applies %t for %T (kind: %v)\n", n.Path, false, source, kind) - return false } // Validate provides a validator for generic JSON numbers, @@ -519,11 +872,18 @@ func (n *numberValidator) Applies(source interface{}, kind reflect.Kind) bool { // // TODO: default boundaries with MAX_SAFE_INTEGER are not checked (specific to json.Number?) func (n *numberValidator) Validate(val interface{}) *Result { - res := new(Result) + if n.Options.recycleValidators { + defer func() { + n.redeem() + }() + } - resMultiple := new(Result) - resMinimum := new(Result) - resMaximum := new(Result) + var res, resMultiple, resMinimum, resMaximum *Result + if n.Options.recycleResult { + res = pools.poolOfResults.BorrowResult() + } else { + res = new(Result) + } // Used only to attempt to validate constraint on value, // even though value or constraint specified do not match type and format @@ -533,68 +893,106 @@ func (n *numberValidator) Validate(val interface{}) *Result { res.AddErrors(IsValueValidAgainstRange(val, n.Type, n.Format, "Checked", n.Path)) if n.MultipleOf != nil { + resMultiple = pools.poolOfResults.BorrowResult() + // Is the constraint specifier within the range of the specific numeric type and format? 
resMultiple.AddErrors(IsValueValidAgainstRange(*n.MultipleOf, n.Type, n.Format, "MultipleOf", n.Path)) if resMultiple.IsValid() { // Constraint validated with compatible types if err := MultipleOfNativeType(n.Path, n.In, val, *n.MultipleOf); err != nil { - resMultiple.Merge(errorHelp.sErr(err)) + resMultiple.Merge(errorHelp.sErr(err, n.Options.recycleResult)) } } else { // Constraint nevertheless validated, converted as general number if err := MultipleOf(n.Path, n.In, data, *n.MultipleOf); err != nil { - resMultiple.Merge(errorHelp.sErr(err)) + resMultiple.Merge(errorHelp.sErr(err, n.Options.recycleResult)) } } } - // nolint: dupl if n.Maximum != nil { + resMaximum = pools.poolOfResults.BorrowResult() + // Is the constraint specifier within the range of the specific numeric type and format? resMaximum.AddErrors(IsValueValidAgainstRange(*n.Maximum, n.Type, n.Format, "Maximum boundary", n.Path)) if resMaximum.IsValid() { // Constraint validated with compatible types if err := MaximumNativeType(n.Path, n.In, val, *n.Maximum, n.ExclusiveMaximum); err != nil { - resMaximum.Merge(errorHelp.sErr(err)) + resMaximum.Merge(errorHelp.sErr(err, n.Options.recycleResult)) } } else { // Constraint nevertheless validated, converted as general number if err := Maximum(n.Path, n.In, data, *n.Maximum, n.ExclusiveMaximum); err != nil { - resMaximum.Merge(errorHelp.sErr(err)) + resMaximum.Merge(errorHelp.sErr(err, n.Options.recycleResult)) } } } - // nolint: dupl if n.Minimum != nil { + resMinimum = pools.poolOfResults.BorrowResult() + // Is the constraint specifier within the range of the specific numeric type and format? resMinimum.AddErrors(IsValueValidAgainstRange(*n.Minimum, n.Type, n.Format, "Minimum boundary", n.Path)) if resMinimum.IsValid() { // Constraint validated with compatible types if err := MinimumNativeType(n.Path, n.In, val, *n.Minimum, n.ExclusiveMinimum); err != nil { - resMinimum.Merge(errorHelp.sErr(err)) + resMinimum.Merge(errorHelp.sErr(err, n.Options.recycleResult)) } } else { // Constraint nevertheless validated, converted as general number if err := Minimum(n.Path, n.In, data, *n.Minimum, n.ExclusiveMinimum); err != nil { - resMinimum.Merge(errorHelp.sErr(err)) + resMinimum.Merge(errorHelp.sErr(err, n.Options.recycleResult)) } } } res.Merge(resMultiple, resMinimum, resMaximum) res.Inc() + return res } +func (n *numberValidator) redeem() { + pools.poolOfNumberValidators.RedeemValidator(n) +} + type stringValidator struct { + Path string + In string Default interface{} Required bool AllowEmptyValue bool MaxLength *int64 MinLength *int64 Pattern string - Path string - In string + Options *SchemaValidatorOptions +} + +func newStringValidator( + path, in string, + def interface{}, required, allowEmpty bool, maxLength, minLength *int64, pattern string, + opts *SchemaValidatorOptions) *stringValidator { + if opts == nil { + opts = new(SchemaValidatorOptions) + } + + var s *stringValidator + if opts.recycleValidators { + s = pools.poolOfStringValidators.BorrowValidator() + } else { + s = new(stringValidator) + } + + s.Path = path + s.In = in + s.Default = def + s.Required = required + s.AllowEmptyValue = allowEmpty + s.MaxLength = maxLength + s.MinLength = minLength + s.Pattern = pattern + s.Options = opts + + return s } func (s *stringValidator) SetPath(path string) { @@ -604,42 +1002,50 @@ func (s *stringValidator) SetPath(path string) { func (s *stringValidator) Applies(source interface{}, kind reflect.Kind) bool { switch source.(type) { case *spec.Parameter, *spec.Schema, *spec.Items, 
*spec.Header: - r := kind == reflect.String - debugLog("string validator for %q applies %t for %T (kind: %v)\n", s.Path, r, source, kind) - return r + return kind == reflect.String + default: + return false } - debugLog("string validator for %q applies %t for %T (kind: %v)\n", s.Path, false, source, kind) - return false } func (s *stringValidator) Validate(val interface{}) *Result { + if s.Options.recycleValidators { + defer func() { + s.redeem() + }() + } + data, ok := val.(string) if !ok { - return errorHelp.sErr(errors.InvalidType(s.Path, s.In, stringType, val)) + return errorHelp.sErr(errors.InvalidType(s.Path, s.In, stringType, val), s.Options.recycleResult) } if s.Required && !s.AllowEmptyValue && (s.Default == nil || s.Default == "") { if err := RequiredString(s.Path, s.In, data); err != nil { - return errorHelp.sErr(err) + return errorHelp.sErr(err, s.Options.recycleResult) } } if s.MaxLength != nil { if err := MaxLength(s.Path, s.In, data, *s.MaxLength); err != nil { - return errorHelp.sErr(err) + return errorHelp.sErr(err, s.Options.recycleResult) } } if s.MinLength != nil { if err := MinLength(s.Path, s.In, data, *s.MinLength); err != nil { - return errorHelp.sErr(err) + return errorHelp.sErr(err, s.Options.recycleResult) } } if s.Pattern != "" { if err := Pattern(s.Path, s.In, data, s.Pattern); err != nil { - return errorHelp.sErr(err) + return errorHelp.sErr(err, s.Options.recycleResult) } } return nil } + +func (s *stringValidator) redeem() { + pools.poolOfStringValidators.RedeemValidator(s) +} diff --git a/vendor/github.com/go-openapi/validate/values.go b/vendor/github.com/go-openapi/validate/values.go index e7ad8c103..5f6f5ee61 100644 --- a/vendor/github.com/go-openapi/validate/values.go +++ b/vendor/github.com/go-openapi/validate/values.go @@ -120,7 +120,7 @@ func UniqueItems(path, in string, data interface{}) *errors.Validation { // MinLength validates a string for minimum length func MinLength(path, in, data string, minLength int64) *errors.Validation { - strLen := int64(utf8.RuneCount([]byte(data))) + strLen := int64(utf8.RuneCountInString(data)) if strLen < minLength { return errors.TooShort(path, in, minLength, data) } @@ -129,7 +129,7 @@ func MinLength(path, in, data string, minLength int64) *errors.Validation { // MaxLength validates a string for maximum length func MaxLength(path, in, data string, maxLength int64) *errors.Validation { - strLen := int64(utf8.RuneCount([]byte(data))) + strLen := int64(utf8.RuneCountInString(data)) if strLen > maxLength { return errors.TooLong(path, in, maxLength, data) } @@ -315,7 +315,7 @@ func FormatOf(path, in, format, data string, registry strfmt.Registry) *errors.V // TODO: Normally, a JSON MAX_SAFE_INTEGER check would ensure conversion remains loss-free func MaximumNativeType(path, in string, val interface{}, max float64, exclusive bool) *errors.Validation { kind := reflect.ValueOf(val).Type().Kind() - switch kind { + switch kind { //nolint:exhaustive case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: value := valueHelp.asInt64(val) return MaximumInt(path, in, value, int64(max), exclusive) @@ -345,7 +345,7 @@ func MaximumNativeType(path, in string, val interface{}, max float64, exclusive // TODO: Normally, a JSON MAX_SAFE_INTEGER check would ensure conversion remains loss-free func MinimumNativeType(path, in string, val interface{}, min float64, exclusive bool) *errors.Validation { kind := reflect.ValueOf(val).Type().Kind() - switch kind { + switch kind { //nolint:exhaustive case reflect.Int, 
reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: value := valueHelp.asInt64(val) return MinimumInt(path, in, value, int64(min), exclusive) @@ -375,7 +375,7 @@ func MinimumNativeType(path, in string, val interface{}, min float64, exclusive // TODO: Normally, a JSON MAX_SAFE_INTEGER check would ensure conversion remains loss-free func MultipleOfNativeType(path, in string, val interface{}, multipleOf float64) *errors.Validation { kind := reflect.ValueOf(val).Type().Kind() - switch kind { + switch kind { //nolint:exhaustive case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: value := valueHelp.asInt64(val) return MultipleOfInt(path, in, value, int64(multipleOf)) @@ -399,7 +399,7 @@ func IsValueValidAgainstRange(val interface{}, typeName, format, prefix, path st // What is the string representation of val var stringRep string - switch kind { + switch kind { //nolint:exhaustive case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: stringRep = swag.FormatUint64(valueHelp.asUint64(val)) case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff.go index 5999f4948..2faba2537 100644 --- a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff.go +++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff.go @@ -2,13 +2,12 @@ import ( "encoding/json" + "errors" "fmt" "io" "log" "os" - "errors" - "github.com/go-openapi/loads" "github.com/go-swagger/go-swagger/cmd/swagger/commands/diff" ) diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/checks.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/checks.go index 2ae1b8227..627bc5f7b 100644 --- a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/checks.go +++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/checks.go @@ -42,8 +42,8 @@ func CompareProperties(location DifferenceLocation, schema1 *spec.Schema, schema schema1Props := propertiesFor(schema1, getRefFn1) schema2Props := propertiesFor(schema2, getRefFn2) - // find deleted and changed properties + // find deleted and changed properties for eachProp1Name, eachProp1 := range schema1Props { eachProp1 := eachProp1 childLoc := addChildDiffNode(location, eachProp1Name, eachProp1.Schema) @@ -66,7 +66,13 @@ func CompareProperties(location DifferenceLocation, schema1 *spec.Schema, schema eachProp2 := eachProp2 if _, ok := schema1.Properties[eachProp2Name]; !ok { childLoc := addChildDiffNode(location, eachProp2Name, &eachProp2) - propDiffs = append(propDiffs, SpecDifference{DifferenceLocation: childLoc, Code: AddedProperty}) + + analyzedProp2 := schema2Props[eachProp2Name] + if analyzedProp2.Required { + propDiffs = append(propDiffs, SpecDifference{DifferenceLocation: childLoc, Code: AddedRequiredProperty}) + } else { + propDiffs = append(propDiffs, SpecDifference{DifferenceLocation: childLoc, Code: AddedProperty}) + } } } return propDiffs diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/compatibility.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/compatibility.go index d31c0e63a..5be29d867 100644 --- a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/compatibility.go +++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/compatibility.go @@ -36,11 +36,12 @@ func init() { AddedConstraint: 
NonBreaking, DeletedExtension: Warning, AddedExtension: Warning, + ChangedExtensionValue: Warning, }, ForRequest: map[SpecChangeCode]Compatibility{ AddedRequiredProperty: Breaking, DeletedProperty: Breaking, - AddedProperty: Breaking, + AddedProperty: NonBreaking, AddedOptionalParam: NonBreaking, AddedRequiredParam: Breaking, DeletedOptionalParam: NonBreaking, @@ -70,6 +71,7 @@ func init() { ChangedCollectionFormat: Breaking, DeletedExtension: Warning, AddedExtension: Warning, + ChangedExtensionValue: Warning, }, ForChange: map[SpecChangeCode]Compatibility{ NoChangeDetected: NonBreaking, @@ -96,6 +98,7 @@ func init() { DeletedDefinition: NonBreaking, DeletedExtension: Warning, AddedExtension: Warning, + ChangedExtensionValue: Warning, }, } } diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difftypes.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difftypes.go index 007862fb9..3d3d5a1c1 100644 --- a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difftypes.go +++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difftypes.go @@ -117,6 +117,8 @@ DeletedExtension // AddedExtension added an extension AddedExtension + // ChangedExtensionValue changed an extension value + ChangedExtensionValue ) var toLongStringSpecChangeCode = map[SpecChangeCode]string{ @@ -173,6 +175,7 @@ ChangedCollectionFormat: "Changed collection format", DeletedExtension: "Deleted Extension", AddedExtension: "Added Extension", + ChangedExtensionValue: "Changed Extension Value", } var toStringSpecChangeCode = map[SpecChangeCode]string{ @@ -229,6 +232,7 @@ ChangedCollectionFormat: "ChangedCollectionFormat", DeletedExtension: "DeletedExtension", AddedExtension: "AddedExtension", + ChangedExtensionValue: "ChangedExtensionValue", } var toIDSpecChangeCode = map[string]SpecChangeCode{} diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_analyser.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_analyser.go index 8df44aeb2..655af1465 100644 --- a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_analyser.go +++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_analyser.go @@ -2,6 +2,7 @@ import ( "fmt" + "reflect" "strings" "github.com/go-openapi/spec" @@ -230,7 +231,15 @@ func (sd *SpecAnalyser) analyseResponseParams() { // deleted responses for code1 := range op1Responses { if _, ok := op2Responses[code1]; !ok { - location := DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code1, Node: getSchemaDiffNode("Body", op1Responses[code1].Schema)} + location := DifferenceLocation{ + URL: eachURLMethodFrom2.Path, + Method: eachURLMethodFrom2.Method, + Response: code1, + Node: getNameOnlyDiffNode("NoContent"), + } + if op1Responses[code1].Schema != nil { + location.Node = getSchemaDiffNode("Body", op1Responses[code1].Schema) + } sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: DeletedResponse}) } } @@ -272,11 +281,22 @@ func (sd *SpecAnalyser) analyseResponseParams() { sd.compareDescripton(responseLocation, op1Response.Description, op2Response.Description) if op1Response.Schema != nil { - sd.compareSchema( - DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code2, Node: getSchemaDiffNode("Body", op1Response.Schema)}, - op1Response.Schema, - op2Response.Schema) + if op2Response.Schema == nil { + sd.Diffs = 
sd.Diffs.addDiff(SpecDifference{ + DifferenceLocation: DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code2, Node: getSchemaDiffNode("Body", op1Response.Schema)}, + Code: DeletedProperty}) + } else { + sd.compareSchema( + DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code2, Node: getSchemaDiffNode("Body", op1Response.Schema)}, + op1Response.Schema, + op2Response.Schema) + } + } else if op2Response.Schema != nil { + sd.Diffs = sd.Diffs.addDiff(SpecDifference{ + DifferenceLocation: DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code2, Node: getSchemaDiffNode("Body", op2Response.Schema)}, + Code: AddedProperty}) } + } else { // op2Response sd.Diffs = sd.Diffs.addDiff(SpecDifference{ @@ -293,6 +313,7 @@ func (sd *SpecAnalyser) analyseExtensions(spec1, spec2 *spec.Swagger) { specLoc := DifferenceLocation{Node: &Node{Field: "Spec"}} sd.checkAddedExtensions(spec1.Extensions, spec2.Extensions, specLoc, "") sd.checkDeletedExtensions(spec1.Extensions, spec2.Extensions, specLoc, "") + sd.checkChangedExtensions(spec1.Extensions, spec2.Extensions, specLoc, "") sd.analyzeInfoExtensions() sd.analyzeTagExtensions(spec1, spec2) @@ -302,19 +323,27 @@ func (sd *SpecAnalyser) analyseExtensions(spec1, spec2 *spec.Swagger) { } func (sd *SpecAnalyser) analyzeOperationExtensions() { + pathsIterated := make(map[string]struct{}) for urlMethod, op2 := range sd.urlMethods2 { pathAndMethodLoc := DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method} if op1, ok := sd.urlMethods1[urlMethod]; ok { - sd.checkAddedExtensions(op1.Extensions, op2.Extensions, DifferenceLocation{URL: urlMethod.Path}, "") + if _, ok := pathsIterated[urlMethod.Path]; !ok { + sd.checkAddedExtensions(op1.Extensions, op2.Extensions, DifferenceLocation{URL: urlMethod.Path}, "") + sd.checkChangedExtensions(op1.Extensions, op2.Extensions, DifferenceLocation{URL: urlMethod.Path}, "") + pathsIterated[urlMethod.Path] = struct{}{} + } sd.checkAddedExtensions(op1.Operation.Responses.Extensions, op2.Operation.Responses.Extensions, pathAndMethodLoc, "Responses") + sd.checkChangedExtensions(op1.Operation.Responses.Extensions, op2.Operation.Responses.Extensions, pathAndMethodLoc, "Responses") sd.checkAddedExtensions(op1.Operation.Extensions, op2.Operation.Extensions, pathAndMethodLoc, "") - + sd.checkChangedExtensions(op1.Operation.Extensions, op2.Operation.Extensions, pathAndMethodLoc, "") + sd.checkParamExtensions(op1, op2, urlMethod) for code, resp := range op1.Operation.Responses.StatusCodeResponses { for hdr, h := range resp.Headers { op2StatusCode, ok := op2.Operation.Responses.StatusCodeResponses[code] if ok { if _, ok = op2StatusCode.Headers[hdr]; ok { sd.checkAddedExtensions(h.Extensions, op2StatusCode.Headers[hdr].Extensions, DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method, Node: getNameOnlyDiffNode("Headers")}, hdr) + sd.checkChangedExtensions(h.Extensions, op2StatusCode.Headers[hdr].Extensions, DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method, Node: getNameOnlyDiffNode("Headers")}, hdr) } } } @@ -326,10 +355,14 @@ func (sd *SpecAnalyser) analyzeOperationExtensions() { } } + pathsIterated = make(map[string]struct{}) for urlMethod, op1 := range sd.urlMethods1 { pathAndMethodLoc := DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method} if op2, ok := sd.urlMethods2[urlMethod]; ok { - sd.checkDeletedExtensions(op1.Extensions, op2.Extensions, 
DifferenceLocation{URL: urlMethod.Path}, "") + if _, ok := pathsIterated[urlMethod.Path]; !ok { + sd.checkDeletedExtensions(op1.Extensions, op2.Extensions, DifferenceLocation{URL: urlMethod.Path}, "") + pathsIterated[urlMethod.Path] = struct{}{} + } sd.checkDeletedExtensions(op1.Operation.Responses.Extensions, op2.Operation.Responses.Extensions, pathAndMethodLoc, "Responses") sd.checkDeletedExtensions(op1.Operation.Extensions, op2.Operation.Extensions, pathAndMethodLoc, "") for code, resp := range op1.Operation.Responses.StatusCodeResponses { @@ -346,11 +379,42 @@ func (sd *SpecAnalyser) analyzeOperationExtensions() { } } +func (sd *SpecAnalyser) checkParamExtensions(op1 *PathItemOp, op2 *PathItemOp, urlMethod URLMethod) { + locations := []string{"query", "path", "body", "header", "formData"} + titles := []string{"Query", "Path", "Body", "Header", "FormData"} + + for i, paramLocation := range locations { + rootNode := getNameOnlyDiffNode(titles[i]) + params1 := getParams(op1.ParentPathItem.Parameters, op1.Operation.Parameters, paramLocation) + params2 := getParams(op2.ParentPathItem.Parameters, op2.Operation.Parameters, paramLocation) + + location := DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method, Node: rootNode} + // detect deleted param extensions + for paramName1, param1 := range params1 { + if param2, ok := params2[paramName1]; ok { + childLocation := location.AddNode(getSchemaDiffNode(paramName1, ¶m1.SimpleSchema)) + sd.checkDeletedExtensions(param1.Extensions, param2.Extensions, childLocation, "") + } + } + + // detect added changed params + for paramName2, param2 := range params2 { + // changed? + if param1, ok := params1[paramName2]; ok { + childLocation := location.AddNode(getSchemaDiffNode(paramName2, ¶m1.SimpleSchema)) + sd.checkAddedExtensions(param1.Extensions, param2.Extensions, childLocation, "") + sd.checkChangedExtensions(param1.Extensions, param2.Extensions, childLocation, "") + } + } + } +} + func (sd *SpecAnalyser) analyzeSecurityDefinitionExtensions(spec1 *spec.Swagger, spec2 *spec.Swagger) { securityDefLoc := DifferenceLocation{Node: &Node{Field: "Security Definitions"}} - for key, securityDef := range spec1.SecurityDefinitions { + for key, securityDef1 := range spec1.SecurityDefinitions { if securityDef2, ok := spec2.SecurityDefinitions[key]; ok { - sd.checkAddedExtensions(securityDef.Extensions, securityDef2.Extensions, securityDefLoc, "") + sd.checkAddedExtensions(securityDef1.Extensions, securityDef2.Extensions, securityDefLoc, "") + sd.checkChangedExtensions(securityDef1.Extensions, securityDef2.Extensions, securityDefLoc, "") } } @@ -365,6 +429,7 @@ func (sd *SpecAnalyser) analyzeSchemaExtensions(schema1, schema2 *spec.Schema, c if schema1 != nil && schema2 != nil { diffLoc := DifferenceLocation{Response: code, URL: urlMethod.Path, Method: urlMethod.Method, Node: getSchemaDiffNode("Body", schema2)} sd.checkAddedExtensions(schema1.Extensions, schema2.Extensions, diffLoc, "") + sd.checkChangedExtensions(schema1.Extensions, schema2.Extensions, diffLoc, "") sd.checkDeletedExtensions(schema1.Extensions, schema2.Extensions, diffLoc, "") if schema1.Items != nil && schema2.Items != nil { sd.analyzeSchemaExtensions(schema1.Items.Schema, schema2.Items.Schema, code, urlMethod) @@ -384,15 +449,18 @@ func (sd *SpecAnalyser) analyzeInfoExtensions() { diffLocation := DifferenceLocation{Node: &Node{Field: "Spec Info"}} sd.checkAddedExtensions(sd.Info1.Extensions, sd.Info2.Extensions, diffLocation, "") sd.checkDeletedExtensions(sd.Info1.Extensions, 
sd.Info2.Extensions, diffLocation, "") + sd.checkChangedExtensions(sd.Info1.Extensions, sd.Info2.Extensions, diffLocation, "") if sd.Info1.Contact != nil && sd.Info2.Contact != nil { diffLocation = DifferenceLocation{Node: &Node{Field: "Spec Info.Contact"}} sd.checkAddedExtensions(sd.Info1.Contact.Extensions, sd.Info2.Contact.Extensions, diffLocation, "") sd.checkDeletedExtensions(sd.Info1.Contact.Extensions, sd.Info2.Contact.Extensions, diffLocation, "") + sd.checkChangedExtensions(sd.Info1.Contact.Extensions, sd.Info2.Contact.Extensions, diffLocation, "") } if sd.Info1.License != nil && sd.Info2.License != nil { diffLocation = DifferenceLocation{Node: &Node{Field: "Spec Info.License"}} sd.checkAddedExtensions(sd.Info1.License.Extensions, sd.Info2.License.Extensions, diffLocation, "") sd.checkDeletedExtensions(sd.Info1.License.Extensions, sd.Info2.License.Extensions, diffLocation, "") + sd.checkChangedExtensions(sd.Info1.License.Extensions, sd.Info2.License.Extensions, diffLocation, "") } } } @@ -403,6 +471,7 @@ func (sd *SpecAnalyser) analyzeTagExtensions(spec1 *spec.Swagger, spec2 *spec.Sw for _, spec1Tag := range spec1.Tags { if spec2Tag.Name == spec1Tag.Name { sd.checkAddedExtensions(spec1Tag.Extensions, spec2Tag.Extensions, diffLocation, "") + sd.checkChangedExtensions(spec1Tag.Extensions, spec2Tag.Extensions, diffLocation, "") } } } @@ -430,6 +499,21 @@ func (sd *SpecAnalyser) checkAddedExtensions(extensions1 spec.Extensions, extens } } +func (sd *SpecAnalyser) checkChangedExtensions(extensions1 spec.Extensions, extensions2 spec.Extensions, diffLocation DifferenceLocation, fieldPrefix string) { + for extKey, ext2Val := range extensions2 { + if ext1Val, ok := extensions1[extKey]; ok && !reflect.DeepEqual(ext1Val, ext2Val) { + if fieldPrefix != "" { + extKey = fmt.Sprintf("%s.%s", fieldPrefix, extKey) + } + sd.Diffs = sd.Diffs.addDiff(SpecDifference{ + DifferenceLocation: diffLocation.AddNode(&Node{Field: extKey}), + Code: ChangedExtensionValue, + Compatibility: Warning, // this could potentially be a breaking change + }) + } + } +} + func (sd *SpecAnalyser) checkDeletedExtensions(extensions1 spec.Extensions, extensions2 spec.Extensions, diffLocation DifferenceLocation, fieldPrefix string) { for extKey := range extensions1 { if _, ok := extensions2[extKey]; !ok { @@ -746,7 +830,11 @@ func (sd *SpecAnalyser) schemaFromRef(ref spec.Ref, defns *spec.Definitions) (ac } func schemaLocationKey(location DifferenceLocation) string { - return location.Method + location.URL + location.Node.Field + location.Node.TypeName + k := location.Method + location.URL + location.Node.Field + location.Node.TypeName + if location.Node.ChildNode != nil && location.Node.ChildNode.IsArray { + k += location.Node.ChildNode.Field + location.Node.ChildNode.TypeName + } + return k } // PropertyDefn combines a property with its required-ness diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/expand.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/expand.go index d8a704673..ed1fd3bfa 100644 --- a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/expand.go +++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/expand.go @@ -63,9 +63,10 @@ func writeToFile(swspec *spec.Swagger, pretty bool, format string, output string } var bb interface{} bb, err = data.MarshalYAML() - b = bb.([]byte) + if err == nil { + b = bb.([]byte) + } } - } if err != nil { diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/model.go 
b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/model.go index fb8c14268..5dffa66ea 100644 --- a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/model.go +++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/model.go @@ -29,6 +29,7 @@ type modelOptions struct { KeepSpecOrder bool `long:"keep-spec-order" description:"keep schema properties order identical to spec file"` AllDefinitions bool `long:"all-definitions" description:"generate all model definitions regardless of usage in operations" hidden:"deprecated"` StructTags []string `long:"struct-tags" description:"the struct tags to generate, repeat for multiple (defaults to json)"` + RootedErrorPath bool `long:"rooted-error-path" description:"extends validation errors with the type name instead of an empty path, in the case of arrays and maps"` } func (mo modelOptions) apply(opts *generator.GenOpts) { @@ -39,6 +40,7 @@ func (mo modelOptions) apply(opts *generator.GenOpts) { opts.PropertiesSpecOrder = mo.KeepSpecOrder opts.IgnoreOperations = mo.AllDefinitions opts.StructTags = mo.StructTags + opts.WantsRootedErrorPath = mo.RootedErrorPath } // WithModels adds the model options group. diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/shared.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/shared.go index ab9725a7c..7eb3af3fa 100644 --- a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/shared.go +++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/shared.go @@ -17,7 +17,7 @@ // FlattenCmdOptions determines options to the flatten spec preprocessing type FlattenCmdOptions struct { WithExpand bool `long:"with-expand" description:"expands all $ref's in spec prior to generation (shorthand to --with-flatten=expand)" group:"shared"` - WithFlatten []string `long:"with-flatten" description:"flattens all $ref's in spec prior to generation" choice:"minimal" choice:"full" choice:"expand" choice:"verbose" choice:"noverbose" choice:"remove-unused" default:"minimal" default:"verbose" group:"shared"` // nolint: staticcheck + WithFlatten []string `long:"with-flatten" description:"flattens all $ref's in spec prior to generation" choice:"minimal" choice:"full" choice:"expand" choice:"verbose" choice:"noverbose" choice:"remove-unused" choice:"keep-names" default:"minimal" default:"verbose" group:"shared"` } // SetFlattenOptions builds flatten options from command line args @@ -64,6 +64,8 @@ func (f *FlattenCmdOptions) SetFlattenOptions(dflt *analysis.FlattenOpts) (res * res.Minimal = true minimalIsSet = true } + case "keep-names": + res.KeepNames = true } } return diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec.go index 3e16789b6..ed47da338 100644 --- a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec.go +++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec.go @@ -1,6 +1,3 @@ -//go:build !go1.11 -// +build !go1.11 - // Copyright 2015 go-swagger maintainers // // Licensed under the Apache License, Version 2.0 (the "License"); @@ -23,16 +20,17 @@ "os" "strings" + "github.com/go-swagger/go-swagger/codescan" + "github.com/go-openapi/loads" "github.com/go-openapi/spec" - "github.com/go-swagger/go-swagger/scan" "github.com/jessevdk/go-flags" "gopkg.in/yaml.v3" ) // SpecFile command to generate a swagger spec from a go application 
type SpecFile struct { - BasePath string `long:"base-path" short:"b" description:"the base path to use" default:"."` + WorkDir string `long:"work-dir" short:"w" description:"the base path to use" default:"."` BuildTags string `long:"tags" short:"t" description:"build tags" default:""` ScanModels bool `long:"scan-models" short:"m" description:"includes models that were annotated with 'swagger:model'"` Compact bool `long:"compact" description:"when present, doesn't prettify the json"` @@ -42,25 +40,32 @@ type SpecFile struct { Exclude []string `long:"exclude" short:"x" description:"exclude packages matching pattern"` IncludeTags []string `long:"include-tag" short:"" description:"include routes having specified tags (can be specified many times)"` ExcludeTags []string `long:"exclude-tag" short:"" description:"exclude routes having specified tags (can be specified many times)"` + ExcludeDeps bool `long:"exclude-deps" short:"" description:"exclude all dependencies of project"` } // Execute runs this command func (s *SpecFile) Execute(args []string) error { + if len(args) == 0 { // by default consider all the paths under the working directory + args = []string{"./..."} + } + input, err := loadSpec(string(s.Input)) if err != nil { return err } - var opts scan.Opts - opts.BasePath = s.BasePath - opts.Input = input + var opts codescan.Options + opts.Packages = args + opts.WorkDir = s.WorkDir + opts.InputSpec = input opts.ScanModels = s.ScanModels opts.BuildTags = s.BuildTags opts.Include = s.Include opts.Exclude = s.Exclude opts.IncludeTags = s.IncludeTags opts.ExcludeTags = s.ExcludeTags - swspec, err := scan.Application(opts) + opts.ExcludeDeps = s.ExcludeDeps + swspec, err := codescan.Run(&opts) if err != nil { return err } @@ -100,7 +105,7 @@ func writeToFile(swspec *spec.Swagger, pretty bool, output string) error { fmt.Println(string(b)) return nil } - return os.WriteFile(output, b, 0644) + return os.WriteFile(output, b, 0644) // #nosec } func marshalToJSONFormat(swspec *spec.Swagger, pretty bool) ([]byte, error) { diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec_go111.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec_go111.go deleted file mode 100644 index bf2295864..000000000 --- a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec_go111.go +++ /dev/null @@ -1,119 +0,0 @@ -//go:build go1.11 -// +build go1.11 - -package generate - -import ( - "encoding/json" - "fmt" - "os" - "strings" - - "github.com/go-swagger/go-swagger/codescan" - - "github.com/go-openapi/loads" - "github.com/go-openapi/spec" - "github.com/jessevdk/go-flags" - "gopkg.in/yaml.v3" -) - -// SpecFile command to generate a swagger spec from a go application -type SpecFile struct { - WorkDir string `long:"work-dir" short:"w" description:"the base path to use" default:"."` - BuildTags string `long:"tags" short:"t" description:"build tags" default:""` - ScanModels bool `long:"scan-models" short:"m" description:"includes models that were annotated with 'swagger:model'"` - Compact bool `long:"compact" description:"when present, doesn't prettify the json"` - Output flags.Filename `long:"output" short:"o" description:"the file to write to"` - Input flags.Filename `long:"input" short:"i" description:"an input swagger file with which to merge"` - Include []string `long:"include" short:"c" description:"include packages matching pattern"` - Exclude []string `long:"exclude" short:"x" description:"exclude packages matching pattern"` - IncludeTags 
[]string `long:"include-tag" short:"" description:"include routes having specified tags (can be specified many times)"` - ExcludeTags []string `long:"exclude-tag" short:"" description:"exclude routes having specified tags (can be specified many times)"` - ExcludeDeps bool `long:"exclude-deps" short:"" description:"exclude all dependencies of project"` -} - -// Execute runs this command -func (s *SpecFile) Execute(args []string) error { - if len(args) == 0 { // by default consider all the paths under the working directory - args = []string{"./..."} - } - - input, err := loadSpec(string(s.Input)) - if err != nil { - return err - } - - var opts codescan.Options - opts.Packages = args - opts.WorkDir = s.WorkDir - opts.InputSpec = input - opts.ScanModels = s.ScanModels - opts.BuildTags = s.BuildTags - opts.Include = s.Include - opts.Exclude = s.Exclude - opts.IncludeTags = s.IncludeTags - opts.ExcludeTags = s.ExcludeTags - opts.ExcludeDeps = s.ExcludeDeps - swspec, err := codescan.Run(&opts) - if err != nil { - return err - } - - return writeToFile(swspec, !s.Compact, string(s.Output)) -} - -func loadSpec(input string) (*spec.Swagger, error) { - if fi, err := os.Stat(input); err == nil { - if fi.IsDir() { - return nil, fmt.Errorf("expected %q to be a file not a directory", input) - } - sp, err := loads.Spec(input) - if err != nil { - return nil, err - } - return sp.Spec(), nil - } - return nil, nil -} - -func writeToFile(swspec *spec.Swagger, pretty bool, output string) error { - var b []byte - var err error - - if strings.HasSuffix(output, "yml") || strings.HasSuffix(output, "yaml") { - b, err = marshalToYAMLFormat(swspec) - } else { - b, err = marshalToJSONFormat(swspec, pretty) - } - - if err != nil { - return err - } - - if output == "" { - fmt.Println(string(b)) - return nil - } - return os.WriteFile(output, b, 0644) // #nosec -} - -func marshalToJSONFormat(swspec *spec.Swagger, pretty bool) ([]byte, error) { - if pretty { - return json.MarshalIndent(swspec, "", " ") - } - return json.Marshal(swspec) -} - -func marshalToYAMLFormat(swspec *spec.Swagger) ([]byte, error) { - b, err := json.Marshal(swspec) - if err != nil { - return nil, err - } - - var jsonObj interface{} - if err := yaml.Unmarshal(b, &jsonObj); err != nil { - return nil, err - } - - return yaml.Marshal(jsonObj) -} diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/serve.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/serve.go index aeea4cedd..63705f618 100644 --- a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/serve.go +++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/serve.go @@ -23,7 +23,7 @@ type ServeCmd struct { BasePath string `long:"base-path" description:"the base path to serve the spec and UI at"` Flavor string `short:"F" long:"flavor" description:"the flavor of docs, can be swagger or redoc" default:"redoc" choice:"redoc" choice:"swagger"` DocURL string `long:"doc-url" description:"override the url which takes a url query param to render the doc ui"` - NoOpen bool `long:"no-open" description:"when present won't open the the browser to show the url"` + NoOpen bool `long:"no-open" description:"when present won't open the browser to show the url"` NoUI bool `long:"no-ui" description:"when present, only the swagger spec will be served"` Flatten bool `long:"flatten" description:"when present, flatten the swagger spec before serving it"` Port int `long:"port" short:"p" description:"the port to serve this site" env:"PORT"` diff --git 
a/vendor/github.com/go-swagger/go-swagger/codescan/application.go b/vendor/github.com/go-swagger/go-swagger/codescan/application.go index 952d9fb1f..b7051ab85 100644 --- a/vendor/github.com/go-swagger/go-swagger/codescan/application.go +++ b/vendor/github.com/go-swagger/go-swagger/codescan/application.go @@ -163,7 +163,7 @@ func (d *entityDecl) ResponseNames() (name, goName string) { return } -func (d *entityDecl) OperationIDS() (result []string) { +func (d *entityDecl) OperationIDs() (result []string) { if d == nil || d.Comments == nil { return nil } @@ -281,7 +281,6 @@ func (s *scanCtx) FindDecl(pkgPath, name string) (*entityDecl, bool) { } return decl, true } - } } } @@ -399,7 +398,7 @@ func (s *scanCtx) FindEnumValues(pkg *packages.Package, enumName string) (list [ } for i, doc := range vs.Doc.List { if doc.Text != "" { - var text = strings.TrimPrefix(doc.Text, "//") + text := strings.TrimPrefix(doc.Text, "//") desc.WriteString(text) if i < docListLen-1 { desc.WriteString(" ") @@ -419,10 +418,7 @@ func (s *scanCtx) FindEnumValues(pkg *packages.Package, enumName string) (list [ return list, descList, true } -func newTypeIndex(pkgs []*packages.Package, - excludeDeps bool, includeTags, excludeTags map[string]bool, - includePkgs, excludePkgs []string) (*typeIndex, error) { - +func newTypeIndex(pkgs []*packages.Package, excludeDeps bool, includeTags, excludeTags map[string]bool, includePkgs, excludePkgs []string) (*typeIndex, error) { ac := &typeIndex{ AllPackages: make(map[string]*packages.Package), Models: make(map[*ast.Ident]*entityDecl), diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/operations.go b/vendor/github.com/go-swagger/go-swagger/codescan/operations.go index c6a194526..b5caedc2f 100644 --- a/vendor/github.com/go-swagger/go-swagger/codescan/operations.go +++ b/vendor/github.com/go-swagger/go-swagger/codescan/operations.go @@ -29,10 +29,10 @@ func (o *operationsBuilder) Build(tgt *spec.Paths) error { sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) } if err := sp.Parse(o.path.Remaining); err != nil { - return fmt.Errorf("operation (%s): %v", op.ID, err) + return fmt.Errorf("operation (%s): %w", op.ID, err) } if err := sp.UnmarshalSpec(op.UnmarshalJSON); err != nil { - return fmt.Errorf("operation (%s): %v", op.ID, err) + return fmt.Errorf("operation (%s): %w", op.ID, err) } if tgt.Paths == nil { diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/parameters.go b/vendor/github.com/go-swagger/go-swagger/codescan/parameters.go index b00916825..9a0b77ca0 100644 --- a/vendor/github.com/go-swagger/go-swagger/codescan/parameters.go +++ b/vendor/github.com/go-swagger/go-swagger/codescan/parameters.go @@ -8,8 +8,6 @@ "golang.org/x/tools/go/ast/astutil" - "github.com/pkg/errors" - "github.com/go-openapi/spec" ) @@ -117,6 +115,7 @@ func (sv paramValidations) SetMaximum(val float64, exclusive bool) { sv.current.Maximum = &val sv.current.ExclusiveMaximum = exclusive } + func (sv paramValidations) SetMinimum(val float64, exclusive bool) { sv.current.Minimum = &val sv.current.ExclusiveMinimum = exclusive @@ -143,6 +142,7 @@ func (sv itemsValidations) SetMaximum(val float64, exclusive bool) { sv.current.Maximum = &val sv.current.ExclusiveMaximum = exclusive } + func (sv itemsValidations) SetMinimum(val float64, exclusive bool) { sv.current.Minimum = &val sv.current.ExclusiveMinimum = exclusive @@ -168,12 +168,11 @@ type parameterBuilder struct { } func (p *parameterBuilder) Build(operations map[string]*spec.Operation) error { - // check if 
there is a swagger:parameters tag that is followed by one or more words, // these words are the ids of the operations this parameter struct applies to // once type name is found convert it to a schema, by looking up the schema in the // parameters dictionary that got passed into this parse method - for _, opid := range p.decl.OperationIDS() { + for _, opid := range p.decl.OperationIDs() { operation, ok := operations[opid] if !ok { operation = new(spec.Operation) @@ -210,10 +209,10 @@ func (p *parameterBuilder) buildFromType(otpe types.Type, op *spec.Operation, se } return p.buildFromStruct(p.decl, stpe, op, seen) default: - return errors.Errorf("unhandled type (%T): %s", stpe, o.Type().Underlying().String()) + return fmt.Errorf("unhandled type (%T): %s", stpe, o.Type().Underlying().String()) } default: - return errors.Errorf("unhandled type (%T): %s", otpe, tpe.String()) + return fmt.Errorf("unhandled type (%T): %s", otpe, tpe.String()) } } @@ -279,9 +278,9 @@ func (p *parameterBuilder) buildFromField(fld *types.Var, tpe types.Type, typabl p.postDecls = append(p.postDecls, sb.postDecls...) return nil } - return errors.Errorf("unable to find package and source file for: %s", ftpe.String()) + return fmt.Errorf("unable to find package and source file for: %s", ftpe.String()) default: - return errors.Errorf("unknown type for %s: %T", fld.String(), fld.Type()) + return fmt.Errorf("unknown type for %s: %T", fld.String(), fld.Type()) } } diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/parser.go b/vendor/github.com/go-swagger/go-swagger/codescan/parser.go index 9637e6c22..3733d50df 100644 --- a/vendor/github.com/go-swagger/go-swagger/codescan/parser.go +++ b/vendor/github.com/go-swagger/go-swagger/codescan/parser.go @@ -2,8 +2,10 @@ import ( "encoding/json" + "errors" "fmt" "go/ast" + "log" "reflect" "regexp" "strconv" @@ -11,7 +13,6 @@ "github.com/go-openapi/loads/fmts" "github.com/go-openapi/spec" - "github.com/pkg/errors" "gopkg.in/yaml.v3" ) @@ -1466,7 +1467,7 @@ func (ss *setOpResponses) Parse(lines []string) error { return nil } -func parseEnum(val string, s *spec.SimpleSchema) []interface{} { +func parseEnumOld(val string, s *spec.SimpleSchema) []interface{} { list := strings.Split(val, ",") interfaceSlice := make([]interface{}, len(list)) for i, d := range list { @@ -1481,6 +1482,35 @@ func parseEnum(val string, s *spec.SimpleSchema) []interface{} { return interfaceSlice } +func parseEnum(val string, s *spec.SimpleSchema) []interface{} { + // obtain the raw elements of the list to latter process them with the parseValueFromSchema + var rawElements []json.RawMessage + if err := json.Unmarshal([]byte(val), &rawElements); err != nil { + log.Print("WARNING: item list for enum is not a valid JSON array, using the old deprecated format") + return parseEnumOld(val, s) + } + + interfaceSlice := make([]interface{}, len(rawElements)) + + for i, d := range rawElements { + + ds, err := strconv.Unquote(string(d)) + if err != nil { + ds = string(d) + } + + v, err := parseValueFromSchema(ds, s) + if err != nil { + interfaceSlice[i] = ds + continue + } + + interfaceSlice[i] = v + } + + return interfaceSlice +} + // AlphaChars used when parsing for Vendor Extensions const AlphaChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers.go b/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers.go index 6ffac76af..a9f8be8ce 100644 --- a/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers.go 
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers.go @@ -1,6 +1,3 @@ -//go:build go1.19 -// +build go1.19 - package codescan import ( diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers_go118.go b/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers_go118.go deleted file mode 100644 index 62eb59a96..000000000 --- a/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers_go118.go +++ /dev/null @@ -1,42 +0,0 @@ -//go:build !go1.19 -// +build !go1.19 - -package codescan - -import "strings" - -// a shared function that can be used to split given headers -// into a title and description -func collectScannerTitleDescription(headers []string) (title, desc []string) { - hdrs := cleanupScannerLines(headers, rxUncommentHeaders, nil) - - idx := -1 - for i, line := range hdrs { - if strings.TrimSpace(line) == "" { - idx = i - break - } - } - - if idx > -1 { - title = hdrs[:idx] - if len(hdrs) > idx+1 { - desc = hdrs[idx+1:] - } else { - desc = nil - } - return - } - - if len(hdrs) > 0 { - line := hdrs[0] - if rxPunctuationEnd.MatchString(line) { - title = []string{line} - desc = hdrs[1:] - } else { - desc = hdrs - } - } - - return -} diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/responses.go b/vendor/github.com/go-swagger/go-swagger/codescan/responses.go index 350cd3a7b..95dff0f85 100644 --- a/vendor/github.com/go-swagger/go-swagger/codescan/responses.go +++ b/vendor/github.com/go-swagger/go-swagger/codescan/responses.go @@ -1,16 +1,14 @@ package codescan import ( + "errors" "fmt" "go/ast" "go/types" "strings" - "github.com/pkg/errors" - - "golang.org/x/tools/go/ast/astutil" - "github.com/go-openapi/spec" + "golang.org/x/tools/go/ast/astutil" ) type responseTypable struct { @@ -97,22 +95,50 @@ func (sv headerValidations) SetMaximum(val float64, exclusive bool) { sv.current.Maximum = &val sv.current.ExclusiveMaximum = exclusive } + func (sv headerValidations) SetMinimum(val float64, exclusive bool) { sv.current.Minimum = &val sv.current.ExclusiveMinimum = exclusive } -func (sv headerValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val } -func (sv headerValidations) SetMinItems(val int64) { sv.current.MinItems = &val } -func (sv headerValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val } -func (sv headerValidations) SetMinLength(val int64) { sv.current.MinLength = &val } -func (sv headerValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val } -func (sv headerValidations) SetPattern(val string) { sv.current.Pattern = val } -func (sv headerValidations) SetUnique(val bool) { sv.current.UniqueItems = val } -func (sv headerValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val } + +func (sv headerValidations) SetMultipleOf(val float64) { + sv.current.MultipleOf = &val +} + +func (sv headerValidations) SetMinItems(val int64) { + sv.current.MinItems = &val +} + +func (sv headerValidations) SetMaxItems(val int64) { + sv.current.MaxItems = &val +} + +func (sv headerValidations) SetMinLength(val int64) { + sv.current.MinLength = &val +} + +func (sv headerValidations) SetMaxLength(val int64) { + sv.current.MaxLength = &val +} + +func (sv headerValidations) SetPattern(val string) { + sv.current.Pattern = val +} + +func (sv headerValidations) SetUnique(val bool) { + sv.current.UniqueItems = val +} + +func (sv headerValidations) SetCollectionFormat(val string) { + sv.current.CollectionFormat = val +} + func (sv headerValidations) SetEnum(val string) { sv.current.Enum = 
parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format}) } + func (sv headerValidations) SetDefault(val interface{}) { sv.current.Default = val } + func (sv headerValidations) SetExample(val interface{}) { sv.current.Example = val } type responseBuilder struct { @@ -215,9 +241,9 @@ func (r *responseBuilder) buildFromField(fld *types.Var, tpe types.Type, typable r.postDecls = append(r.postDecls, sb.postDecls...) return nil } - return errors.Errorf("unable to find package and source file for: %s", ftpe.String()) + return fmt.Errorf("unable to find package and source file for: %s", ftpe.String()) default: - return errors.Errorf("unknown type for %s: %T", fld.String(), fld.Type()) + return fmt.Errorf("unknown type for %s: %T", fld.String(), fld.Type()) } } @@ -256,7 +282,7 @@ func (r *responseBuilder) buildFromType(otpe types.Type, resp *spec.Response, se r.postDecls = append(r.postDecls, sb.postDecls...) return nil } - return errors.Errorf("responses can only be structs, did you mean for %s to be the response body?", otpe.String()) + return fmt.Errorf("responses can only be structs, did you mean for %s to be the response body?", otpe.String()) } default: return errors.New("anonymous types are currently not supported for responses") diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/routes.go b/vendor/github.com/go-swagger/go-swagger/codescan/routes.go index af58e43f3..20cbf2c7b 100644 --- a/vendor/github.com/go-swagger/go-swagger/codescan/routes.go +++ b/vendor/github.com/go-swagger/go-swagger/codescan/routes.go @@ -58,7 +58,6 @@ type routesBuilder struct { } func (r *routesBuilder) Build(tgt *spec.Paths) error { - pthObj := tgt.Paths[r.route.Path] op := setPathOperation( r.route.Method, r.route.ID, @@ -82,7 +81,7 @@ func (r *routesBuilder) Build(tgt *spec.Paths) error { newMultiLineTagParser("Extensions", newSetExtensions(opExtensionsSetter(op)), true), } if err := sp.Parse(r.route.Remaining); err != nil { - return fmt.Errorf("operation (%s): %v", op.ID, err) + return fmt.Errorf("operation (%s): %w", op.ID, err) } if tgt.Paths == nil { diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/schema.go b/vendor/github.com/go-swagger/go-swagger/codescan/schema.go index 8c6723040..dce74fc30 100644 --- a/vendor/github.com/go-swagger/go-swagger/codescan/schema.go +++ b/vendor/github.com/go-swagger/go-swagger/codescan/schema.go @@ -2,6 +2,7 @@ import ( "encoding/json" + "errors" "fmt" "go/ast" "go/importer" @@ -15,7 +16,6 @@ "golang.org/x/tools/go/ast/astutil" "github.com/go-openapi/spec" - "github.com/pkg/errors" ) func addExtension(ve *spec.VendorExtensible, key string, value interface{}) { @@ -92,6 +92,7 @@ func (sv schemaValidations) SetMaximum(val float64, exclusive bool) { sv.current.Maximum = &val sv.current.ExclusiveMaximum = exclusive } + func (sv schemaValidations) SetMinimum(val float64, exclusive bool) { sv.current.Minimum = &val sv.current.ExclusiveMinimum = exclusive @@ -358,9 +359,12 @@ func (s *schemaBuilder) buildFromType(tpe types.Type, tgt swaggerTypable) error return nil } + if s.decl.Spec.Assign.IsValid() { + return s.buildFromType(titpe.Underlying(), tgt) + } + switch utitpe := tpe.Underlying().(type) { case *types.Struct: - if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok { if decl.Type.Obj().Pkg().Path() == "time" && decl.Type.Obj().Name() == "Time" { tgt.Typed("string", "date-time") @@ -892,7 +896,7 @@ func (s *schemaBuilder) buildAllOf(tpe types.Type, schema *spec.Schema) error { } return s.buildFromStruct(decl, 
utpe, schema, make(map[string]string)) } - return errors.Errorf("can't find source file for struct: %s", ftpe.String()) + return fmt.Errorf("can't find source file for struct: %s", ftpe.String()) case *types.Interface: decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name()) if found { @@ -905,7 +909,7 @@ func (s *schemaBuilder) buildAllOf(tpe types.Type, schema *spec.Schema) error { } return s.buildFromInterface(decl, utpe, schema, make(map[string]string)) } - return errors.Errorf("can't find source file for interface: %s", ftpe.String()) + return fmt.Errorf("can't find source file for interface: %s", ftpe.String()) default: log.Printf("WARNING: can't figure out object type for allOf named type (%T): %v", ftpe, ftpe.Underlying()) return fmt.Errorf("unable to locate source file for allOf %s", utpe.String()) @@ -929,13 +933,13 @@ func (s *schemaBuilder) buildEmbedded(tpe types.Type, schema *spec.Schema, seen if found { return s.buildFromStruct(decl, utpe, schema, seen) } - return errors.Errorf("can't find source file for struct: %s", ftpe.String()) + return fmt.Errorf("can't find source file for struct: %s", ftpe.String()) case *types.Interface: decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name()) if found { return s.buildFromInterface(decl, utpe, schema, seen) } - return errors.Errorf("can't find source file for struct: %s", ftpe.String()) + return fmt.Errorf("can't find source file for struct: %s", ftpe.String()) default: log.Printf("WARNING: can't figure out object type for embedded named type (%T): %v", ftpe, ftpe.Underlying()) } diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/spec.go b/vendor/github.com/go-swagger/go-swagger/codescan/spec.go index 726787c11..20c4e1022 100644 --- a/vendor/github.com/go-swagger/go-swagger/codescan/spec.go +++ b/vendor/github.com/go-swagger/go-swagger/codescan/spec.go @@ -54,7 +54,7 @@ func (s *specBuilder) Build() (*spec.Swagger, error) { return nil, err } - if err := s.buildRespones(); err != nil { + if err := s.buildResponses(); err != nil { return nil, err } @@ -160,7 +160,7 @@ func (s *specBuilder) buildRoutes() error { return nil } -func (s *specBuilder) buildRespones() error { +func (s *specBuilder) buildResponses() error { // build responses dictionary for _, decl := range s.ctx.app.Responses { rb := &responseBuilder{ diff --git a/vendor/github.com/go-swagger/go-swagger/generator/bindata.go b/vendor/github.com/go-swagger/go-swagger/generator/bindata.go index 379362734..d6779ba61 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/bindata.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/bindata.go @@ -11,7 +11,7 @@ // AssetNames returns the names of the assets. 
func AssetNames() []string { names := make([]string, 0) - _ = fs.WalkDir(_bindata, "templates", func(path string, d fs.DirEntry, err error) error { + _ = fs.WalkDir(_bindata, "templates", func(path string, _ fs.DirEntry, err error) error { if err != nil { return err } diff --git a/vendor/github.com/go-swagger/go-swagger/generator/config.go b/vendor/github.com/go-swagger/go-swagger/generator/config.go index 2d9413218..a565cb203 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/config.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/config.go @@ -1,6 +1,7 @@ package generator import ( + "errors" "fmt" "os" "path/filepath" @@ -53,7 +54,8 @@ func ReadConfig(fpath string) (*viper.Viper, error) { v.SetConfigName(".swagger") v.AddConfigPath(".") if err := v.ReadInConfig(); err != nil { - if _, ok := err.(viper.UnsupportedConfigError); !ok && v.ConfigFileUsed() != "" { + var e viper.UnsupportedConfigError + if !errors.As(err, &e) && v.ConfigFileUsed() != "" { return nil, err } } diff --git a/vendor/github.com/go-swagger/go-swagger/generator/formats.go b/vendor/github.com/go-swagger/go-swagger/generator/formats.go index 3d127333f..121679ace 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/formats.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/formats.go @@ -57,6 +57,7 @@ "strfmt.UUID3": "strfmt.UUID3(\"\")", "strfmt.UUID4": "strfmt.UUID4(\"\")", "strfmt.UUID5": "strfmt.UUID5(\"\")", + "strfmt.ULID": "strfmt.ULID(\"\")", // "file": "runtime.File", } @@ -165,6 +166,7 @@ "uuid3": "strfmt.UUID3", "uuid4": "strfmt.UUID4", "uuid5": "strfmt.UUID5", + "ulid": "strfmt.ULID", // For file producers "file": "runtime.File", }, diff --git a/vendor/github.com/go-swagger/go-swagger/generator/genopts_nonwin.go b/vendor/github.com/go-swagger/go-swagger/generator/genopts_nonwin.go index 7e2a4f1c0..fd1d0aaa1 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/genopts_nonwin.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/genopts_nonwin.go @@ -34,13 +34,11 @@ func (t *Repository) LoadPlugin(pluginPath string) error { log.Printf("Attempting to load template plugin: %s", pluginPath) p, err := plugin.Open(pluginPath) - if err != nil { return err } f, err := p.Lookup("AddFuncs") - if err != nil { return err } diff --git a/vendor/github.com/go-swagger/go-swagger/generator/language.go b/vendor/github.com/go-swagger/go-swagger/generator/language.go index 01c7a318e..68e911663 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/language.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/language.go @@ -141,7 +141,7 @@ func (l *LanguageOpts) baseImport(tgt string) string { // GoLangOpts for rendering items as golang code func GoLangOpts() *LanguageOpts { - var goOtherReservedSuffixes = map[string]bool{ + goOtherReservedSuffixes := map[string]bool{ // see: // https://golang.org/src/go/build/syslist.go // https://golang.org/doc/install/source#environment @@ -154,6 +154,7 @@ func GoLangOpts() *LanguageOpts { "freebsd": true, "hurd": true, "illumos": true, + "ios": true, "js": true, "linux": true, "nacl": true, @@ -172,6 +173,7 @@ func GoLangOpts() *LanguageOpts { "armbe": true, "arm64": true, "arm64be": true, + "loong64": true, "mips": true, "mipsle": true, "mips64": true, @@ -436,5 +438,4 @@ func checkPrefixAndFetchRelativePath(childpath string, parentpath string) (bool, } return false, "" - } diff --git a/vendor/github.com/go-swagger/go-swagger/generator/media.go b/vendor/github.com/go-swagger/go-swagger/generator/media.go index 
f9dad9fa4..239926dc8 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/media.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/media.go @@ -71,6 +71,10 @@ func mediaMime(orig string) string { return strings.SplitN(orig, ";", 2)[0] } +func mediaGoName(media string) string { + return pascalize(strings.ReplaceAll(media, "*", "Star")) +} + func mediaParameters(orig string) string { parts := strings.SplitN(orig, ";", 2) if len(parts) < 2 { diff --git a/vendor/github.com/go-swagger/go-swagger/generator/model.go b/vendor/github.com/go-swagger/go-swagger/generator/model.go index 132927d48..ca5b87a09 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/model.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/model.go @@ -120,10 +120,9 @@ type definitionGenerator struct { } func (m *definitionGenerator) Generate() error { - mod, err := makeGenDefinition(m.Name, m.Target, m.Model, m.SpecDoc, m.opts) if err != nil { - return fmt.Errorf("could not generate definitions for model %s on target %s: %v", m.Name, m.Target, err) + return fmt.Errorf("could not generate definitions for model %s on target %s: %w", m.Name, m.Target, err) } if m.opts.DumpData { @@ -133,7 +132,7 @@ func (m *definitionGenerator) Generate() error { if m.opts.IncludeModel { log.Println("including additional model") if err := m.generateModel(mod); err != nil { - return fmt.Errorf("could not generate model: %v", err) + return fmt.Errorf("could not generate model: %w", err) } } log.Println("generated model", m.Name) @@ -255,9 +254,10 @@ func makeGenDefinitionHierarchy(name, pkg, container string, schema spec.Schema, StrictAdditionalProperties: opts.StrictAdditionalProperties, WithXML: opts.WithXML, StructTags: opts.StructTags, + WantsRootedErrorPath: opts.WantsRootedErrorPath, } if err := pg.makeGenSchema(); err != nil { - return nil, fmt.Errorf("could not generate schema for %s: %v", name, err) + return nil, fmt.Errorf("could not generate schema for %s: %w", name, err) } dsi, ok := di.Discriminators["#/definitions/"+name] if ok { @@ -358,6 +358,7 @@ func makeGenDefinitionHierarchy(name, pkg, container string, schema spec.Schema, "runtime": "github.com/go-openapi/runtime", "swag": "github.com/go-openapi/swag", "validate": "github.com/go-openapi/validate", + "strfmt": "github.com/go-openapi/strfmt", } return &GenDefinition{ @@ -442,12 +443,12 @@ type schemaGenContext struct { AdditionalProperty bool Untyped bool Named bool - RefHandled bool IsVirtual bool IsTuple bool IncludeValidator bool IncludeModel bool StrictAdditionalProperties bool + WantsRootedErrorPath bool WithXML bool Index int @@ -473,6 +474,10 @@ type schemaGenContext struct { // force to use container in inlined definitions (for deconflicting) UseContainerInName bool + // indicates is the schema is part of a slice or a map + IsElem bool + // indicates is the schema is part of a struct + IsProperty bool } func (sg *schemaGenContext) NewSliceBranch(schema *spec.Schema) *schemaGenContext { @@ -500,6 +505,7 @@ func (sg *schemaGenContext) NewSliceBranch(schema *spec.Schema) *schemaGenContex pg.ValueExpr = pg.ValueExpr + "[" + indexVar + "]" pg.Schema = *schema pg.Required = false + pg.IsElem = true if sg.IsVirtual { pg.TypeResolver = sg.TypeResolver.NewWithModelName(sg.TypeResolver.ModelName) } @@ -566,6 +572,7 @@ func (sg *schemaGenContext) NewStructBranch(name string, schema spec.Schema) *sc pg.Name = name pg.ValueExpr = pg.ValueExpr + "." 
+ pascalize(goName(&schema, name)) pg.Schema = schema + pg.IsProperty = true for _, fn := range sg.Schema.Required { if name == fn { pg.Required = true @@ -621,6 +628,7 @@ func (sg *schemaGenContext) NewAdditionalProperty(schema spec.Schema) *schemaGen if sg.Path != "" { pg.Path = sg.Path + "+\".\"+" + pg.KeyVar } + pg.IsElem = true // propagates the special IsNullable override for maps of slices and // maps of aliased types. pg.GenSchema.IsMapNullOverride = sg.GenSchema.IsMapNullOverride @@ -680,7 +688,7 @@ func (sg *schemaGenContext) schemaValidations() sharedValidations { // when readOnly or default is specified, this disables Required validation (Swagger-specific) isRequired = false if sg.Required { - log.Printf("warn: properties with a default value or readOnly should not be required [%s]", sg.Name) + log.Printf("warning: properties with a default value or readOnly should not be required [%s]", sg.Name) } } @@ -841,7 +849,7 @@ func (sg *schemaGenContext) buildProperties() error { } // set property name - var nm = filepath.Base(emprop.Schema.Ref.GetURL().Fragment) + nm := filepath.Base(emprop.Schema.Ref.GetURL().Fragment) tr := sg.TypeResolver.NewWithModelName(goName(&emprop.Schema, swag.ToGoName(nm))) ttpe, err := tr.ResolveSchema(sch, false, true) @@ -1228,7 +1236,13 @@ func (mt *mapStack) Dict() map[string]interface{} { func (sg *schemaGenContext) buildAdditionalProperties() error { if sg.Schema.AdditionalProperties == nil { - return nil + if sg.Schema.MinProperties == nil && sg.Schema.MaxProperties == nil { + return nil + } + + // whenever there is a validation on min/max properties and no additionalProperties is defined, + // we imply additionalProperties: true (corresponds to jsonschema defaults). + sg.Schema.AdditionalProperties = &spec.SchemaOrBool{Allows: true} } addp := *sg.Schema.AdditionalProperties @@ -1256,7 +1270,9 @@ func (sg *schemaGenContext) buildAdditionalProperties() error { sg.GenSchema.IsComplexObject = false sg.GenSchema.IsMap = true - sg.GenSchema.ValueExpression += "." + swag.ToGoName(sg.Name+" additionalProperties") + if !sg.IsElem && !sg.IsProperty { + sg.GenSchema.ValueExpression += "." + swag.ToGoName(sg.Name+" additionalProperties") + } cp := sg.NewAdditionalProperty(*addp.Schema) cp.Name += "AdditionalProperties" cp.Required = false @@ -1325,7 +1341,6 @@ func (sg *schemaGenContext) buildAdditionalProperties() error { if err := comprop.makeGenSchema(); err != nil { return err } - sg.MergeResult(comprop, false) sg.GenSchema.AdditionalProperties = &comprop.GenSchema sg.GenSchema.AdditionalProperties.ValueExpression = sg.GenSchema.ValueExpression + "[" + comprop.KeyVar + "]" @@ -1598,9 +1613,8 @@ func (sg *schemaGenContext) buildItems() error { } func (sg *schemaGenContext) buildAdditionalItems() error { - wantsAdditionalItems := - sg.Schema.AdditionalItems != nil && - (sg.Schema.AdditionalItems.Allows || sg.Schema.AdditionalItems.Schema != nil) + wantsAdditionalItems := sg.Schema.AdditionalItems != nil && + (sg.Schema.AdditionalItems.Allows || sg.Schema.AdditionalItems.Schema != nil) sg.GenSchema.HasAdditionalItems = wantsAdditionalItems if wantsAdditionalItems { @@ -1672,8 +1686,7 @@ func (sg *schemaGenContext) shortCircuitNamedRef() (bool, error) { // NOTE: this assumes that all $ref point to a definition, // i.e. the spec is canonical, as guaranteed by minimal flattening. 
// - // TODO: RefHandled is actually set nowhere - if sg.RefHandled || !sg.Named || sg.Schema.Ref.String() == "" { + if !sg.Named || sg.Schema.Ref.String() == "" { return false, nil } debugLogAsJSON("short circuit named ref: %q", sg.Schema.Ref.String(), sg.Schema) @@ -1684,6 +1697,8 @@ func (sg *schemaGenContext) shortCircuitNamedRef() (bool, error) { // check if the $ref points to a simple type or polymorphic (base) type. // // If this is the case, just realias this simple type, without creating a struct. + // + // In templates this case is identified by .IsSuperAlias = true asch, era := analysis.Schema(analysis.SchemaOpts{ Root: sg.TypeResolver.Doc.Spec(), BasePath: sg.TypeResolver.Doc.SpecFilePath(), @@ -1734,10 +1749,16 @@ func (sg *schemaGenContext) shortCircuitNamedRef() (bool, error) { } // Aliased object: use golang struct composition. + // Covers case of a type redefinition like: + // thistype: + // $ref: #/definitions/othertype + // // This is rendered as a struct with type field, i.e. : // Alias struct { // AliasedType // } + // + // In templates, the schema is composed like AllOf. nullableOverride := sg.GenSchema.IsNullable tpe := resolvedType{} @@ -1750,17 +1771,26 @@ func (sg *schemaGenContext) shortCircuitNamedRef() (bool, error) { tpe.IsAnonymous = false tpe.IsNullable = sg.TypeResolver.isNullable(&sg.Schema) - item := sg.NewCompositionBranch(sg.Schema, 0) - if err := item.makeGenSchema(); err != nil { + branch := sg.NewCompositionBranch(sg.Schema, 0) + if err := branch.makeGenSchema(); err != nil { return true, err } sg.GenSchema.resolvedType = tpe sg.GenSchema.IsNullable = sg.GenSchema.IsNullable || nullableOverride // prevent format from bubbling up in composed type - item.GenSchema.IsCustomFormatter = false + branch.GenSchema.IsCustomFormatter = false + + sg.MergeResult(branch, true) + + tpx, ers := sg.TypeResolver.ResolveSchema(&sg.Schema, false, true) + if ers != nil { + return false, ers + } + // we don't know the actual validation status yet. 
So assume true, + // unless we can infer that no Validate() method will be present + branch.GenSchema.HasValidations = !tpx.IsInterface && !tpx.IsStream + sg.GenSchema.AllOf = append(sg.GenSchema.AllOf, branch.GenSchema) - sg.MergeResult(item, true) - sg.GenSchema.AllOf = append(sg.GenSchema.AllOf, item.GenSchema) return true, nil } @@ -1967,6 +1997,9 @@ func (sg *schemaGenContext) makeGenSchema() error { sg.GenSchema.Default = sg.Schema.Default sg.GenSchema.StructTags = sg.StructTags sg.GenSchema.ExtraImports = make(map[string]string) + sg.GenSchema.WantsRootedErrorPath = sg.WantsRootedErrorPath + sg.GenSchema.IsElem = sg.IsElem + sg.GenSchema.IsProperty = sg.IsProperty var err error returns, err := sg.shortCircuitNamedRef() @@ -1974,6 +2007,7 @@ func (sg *schemaGenContext) makeGenSchema() error { return err } if returns { + // short circuited on a resolved $ref return nil } debugLogAsJSON("after short circuit named ref", sg.Schema) @@ -2035,6 +2069,8 @@ func (sg *schemaGenContext) makeGenSchema() error { log.Printf("INFO: type %s is external, with inferred spec type %s, referred to as %s", sg.GenSchema.Name, sg.GenSchema.GoType, extType) sg.GenSchema.GoType = extType sg.GenSchema.AliasedType = extType + + // short circuit schema building for external types return nil } // TODO: case for embedded types as anonymous definitions @@ -2073,6 +2109,8 @@ func (sg *schemaGenContext) makeGenSchema() error { sg.GenSchema.IsMap = prev.IsMap sg.GenSchema.IsAdditionalProperties = prev.IsAdditionalProperties sg.GenSchema.IsBaseType = sg.GenSchema.HasDiscriminator + sg.GenSchema.IsElem = prev.IsElem + sg.GenSchema.IsProperty = prev.IsProperty debugLogAsJSON("gschema nnullable:IsNullable:%t,resolver.IsNullable:%t,nullableOverride:%t", sg.GenSchema.IsNullable, otn, nullableOverride, sg.Schema) @@ -2114,5 +2152,6 @@ func (sg *schemaGenContext) makeGenSchema() error { (gs.IsTuple || gs.IsComplexObject || gs.IsAdditionalProperties || (gs.IsPrimitive && gs.IsAliased && gs.IsCustomFormatter && !strings.Contains(gs.Zero(), `("`))) debugLog("finished gen schema for %q", sg.Name) + return nil } diff --git a/vendor/github.com/go-swagger/go-swagger/generator/operation.go b/vendor/github.com/go-swagger/go-swagger/generator/operation.go index 8f4b8b2f6..e606e5143 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/operation.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/operation.go @@ -18,7 +18,9 @@ "encoding/json" "errors" "fmt" + "log" "path/filepath" + "regexp" "sort" "strings" @@ -149,7 +151,6 @@ type operationGenerator struct { // Generate a single operation func (o *operationGenerator) Generate() error { - defaultImports := o.GenOpts.defaultImports() apiPackage := o.GenOpts.LanguageOpts.ManglePackagePath(o.GenOpts.APIPackage, defaultOperationsTarget) @@ -164,6 +165,7 @@ func (o *operationGenerator) Generate() error { Imports: imports, DefaultScheme: o.DefaultScheme, Doc: o.Doc, + PristineDefs: o.Doc.Pristine(), Analyzed: o.Analyzed, BasePath: o.BasePath, GenOpts: o.GenOpts, @@ -223,7 +225,7 @@ type codeGenOpBuilder struct { Target string Operation spec.Operation Doc *loads.Document - PristineDoc *loads.Document + PristineDefs *loads.Document Analyzed *analysis.Spec DefaultImports map[string]string Imports map[string]string @@ -245,12 +247,24 @@ func paramMappings(params map[string]spec.Parameter) (map[string]map[string]stri "header": make(map[string]string, len(params)), "body": make(map[string]string, len(params)), } + debugLog("paramMappings: map=%v", params) // In order to avoid 
unstable generation, adopt same naming convention // for all parameters with same name across locations. - seenIds := make(map[string]interface{}, len(params)) + seenIDs := make(map[string]interface{}, len(params)) for id, p := range params { - if val, ok := seenIds[p.Name]; ok { + debugLog("paramMappings: params: id=%s, In=%q, Name=%q", id, p.In, p.Name) + // guard against possible validation failures and/or skipped issues + if _, found := idMapping[p.In]; !found { + log.Printf(`warning: parameter named %q has an invalid "in": %q. Skipped`, p.Name, p.In) + continue + } + if p.Name == "" { + log.Printf(`warning: unnamed parameter (%+v). Skipped`, p) + continue + } + + if val, ok := seenIDs[p.Name]; ok { previous := val.(struct{ id, in string }) idMapping[p.In][p.Name] = swag.ToGoName(id) // rewrite the previously found one @@ -258,11 +272,11 @@ func paramMappings(params map[string]spec.Parameter) (map[string]map[string]stri } else { idMapping[p.In][p.Name] = swag.ToGoName(p.Name) } - seenIds[strings.ToLower(idMapping[p.In][p.Name])] = struct{ id, in string }{id: id, in: p.In} + seenIDs[strings.ToLower(idMapping[p.In][p.Name])] = struct{ id, in string }{id: id, in: p.In} } // pick a deconflicted private name for timeout for this operation - timeoutName := renameTimeout(seenIds, "timeout") + timeoutName := renameTimeout(seenIDs, "timeout") return idMapping, timeoutName } @@ -272,12 +286,12 @@ func paramMappings(params map[string]spec.Parameter) (map[string]map[string]stri // // NOTE: this merely protects the timeout field in the client parameter struct, // fields "Context" and "HTTPClient" remain exposed to name conflicts. -func renameTimeout(seenIds map[string]interface{}, timeoutName string) string { - if seenIds == nil { +func renameTimeout(seenIDs map[string]interface{}, timeoutName string) string { + if seenIDs == nil { return timeoutName } current := strings.ToLower(timeoutName) - if _, ok := seenIds[current]; !ok { + if _, ok := seenIDs[current]; !ok { return timeoutName } var next string @@ -297,7 +311,7 @@ func renameTimeout(seenIds map[string]interface{}, timeoutName string) string { default: next = timeoutName + "1" } - return renameTimeout(seenIds, next) + return renameTimeout(seenIDs, next) } func (b *codeGenOpBuilder) MakeOperation() (GenOperation, error) { @@ -325,7 +339,6 @@ func (b *codeGenOpBuilder) MakeOperation() (GenOperation, error) { for _, p := range paramsForOperation { cp, err := b.MakeParameter(receiver, resolver, p, idMapping) - if err != nil { return GenOperation{}, err } @@ -417,10 +430,7 @@ func (b *codeGenOpBuilder) MakeOperation() (GenOperation, error) { originalExtraSchemes := getExtraSchemes(operation.Extensions) produces := producesOrDefault(operation.Produces, swsp.Produces, b.DefaultProduces) - sort.Strings(produces) - consumes := producesOrDefault(operation.Consumes, swsp.Consumes, b.DefaultConsumes) - sort.Strings(consumes) var successResponse *GenResponse for _, resp := range successResponses { @@ -718,7 +728,12 @@ func (b *codeGenOpBuilder) MakeParameter(receiver string, resolver *typeResolver b.Method, b.Path, param.Name, goName) } } else if len(idMapping) > 0 { - id = idMapping[param.In][param.Name] + id, ok = idMapping[param.In][param.Name] + if !ok { + // skipped parameter + return GenParameter{}, fmt.Errorf(`%s %s, %q has an invalid parameter definition`, + b.Method, b.Path, param.Name) + } } res := GenParameter{ @@ -739,6 +754,16 @@ func (b *codeGenOpBuilder) MakeParameter(receiver string, resolver *typeResolver Extensions: param.Extensions, } 
+ if goCustomTag, ok := param.Extensions["x-go-custom-tag"]; ok { + customTag, ok := goCustomTag.(string) + if !ok { + return GenParameter{}, fmt.Errorf(`%s %s, parameter %q: "x-go-custom-tag" field must be a string, not a %T`, + b.Method, b.Path, param.Name, goCustomTag) + } + + res.CustomTag = customTag + } + if param.In == "body" { // Process parameters declared in body (i.e. have a Schema) res.Required = param.Required @@ -964,7 +989,6 @@ func (b *codeGenOpBuilder) setBodyParamValidation(p *GenParameter) { p.HasModelBodyMap = hasModelBodyMap p.HasSimpleBodyMap = hasSimpleBodyMap } - } // makeSecuritySchemes produces a sorted list of security schemes for this operation @@ -1012,10 +1036,7 @@ func (b *codeGenOpBuilder) cloneSchema(schema *spec.Schema) *spec.Schema { // This uses a deep clone the spec document to construct a type resolver which knows about definitions when the making of this operation started, // and only these definitions. We are not interested in the "original spec", but in the already transformed spec. func (b *codeGenOpBuilder) saveResolveContext(resolver *typeResolver, schema *spec.Schema) (*typeResolver, *spec.Schema) { - if b.PristineDoc == nil { - b.PristineDoc = b.Doc.Pristine() - } - rslv := newTypeResolver(b.GenOpts.LanguageOpts.ManglePackageName(resolver.ModelsPackage, defaultModelsTarget), b.DefaultImports[b.ModelsPackage], b.PristineDoc) + rslv := newTypeResolver(b.GenOpts.LanguageOpts.ManglePackageName(resolver.ModelsPackage, defaultModelsTarget), b.DefaultImports[b.ModelsPackage], b.PristineDefs) return rslv, b.cloneSchema(schema) } @@ -1226,11 +1247,19 @@ func (b *codeGenOpBuilder) analyzeTags() (string, []string, bool) { // conflict with "operations" package is handled separately tag = renameOperationPackage(intersected, tag) } + + if matches := versionedPkgRex.FindStringSubmatch(tag); len(matches) > 2 { + // rename packages like "v1", "v2" ... as they hold a special meaning for go + tag = "version" + matches[2] + } + b.APIPackage = b.GenOpts.LanguageOpts.ManglePackageName(tag, b.APIPackage) // actual package name b.APIPackageAlias = deconflictTag(intersected, b.APIPackage) // deconflicted import alias return tag, intersected, len(filter) == 0 || len(filter) > 0 && len(intersected) > 0 } +var versionedPkgRex = regexp.MustCompile(`(?i)(v)([0-9]+)`) + func maxInt(a, b int) int { if a > b { return a @@ -1268,6 +1297,7 @@ func deconflictPkg(pkg string, renamer func(string) string) string { case "tls", "http", "fmt", "strings", "log", "flags", "pflag", "json", "time": return renamer(pkg) } + return pkg } diff --git a/vendor/github.com/go-swagger/go-swagger/generator/shared.go b/vendor/github.com/go-swagger/go-swagger/generator/shared.go index 5e2c2cee2..e466a9301 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/shared.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/shared.go @@ -68,15 +68,21 @@ func DefaultSectionOpts(gen *GenOpts) { FileName: "{{ (snakize (pascalize .Name)) }}.go", }, } - if gen.IncludeCLi { - opts = append(opts, TemplateOpts{ + sec.Models = opts + } + + if len(sec.PostModels) == 0 && gen.IncludeCLi { + // For CLI, we need to postpone the generation of model-supporting source, + // in order for go imports to run properly in all cases. 
+ opts := []TemplateOpts{ + { Name: "clidefinitionhook", Source: "asset:cliModelcli", Target: "{{ joinFilePath .Target (toPackagePath .CliPackage) }}", FileName: "{{ (snakize (pascalize .Name)) }}_model.go", - }) + }, } - sec.Models = opts + sec.PostModels = opts } if len(sec.Operations) == 0 { @@ -228,7 +234,6 @@ func DefaultSectionOpts(gen *GenOpts) { Target: "{{ joinFilePath .Target (toPackagePath .ServerPackage) }}", FileName: "auto_configure_{{ (snakize (pascalize .Name)) }}.go", }) - } else { opts = append(opts, TemplateOpts{ Name: "configure", @@ -242,7 +247,6 @@ func DefaultSectionOpts(gen *GenOpts) { } } gen.Sections = sec - } // MarkdownOpts for rendering a spec as markdown @@ -255,6 +259,7 @@ func MarkdownOpts() *LanguageOpts { // MarkdownSectionOpts for a given opts and output file. func MarkdownSectionOpts(gen *GenOpts, output string) { gen.Sections.Models = nil + gen.Sections.PostModels = nil gen.Sections.OperationGroups = nil gen.Sections.Operations = nil gen.LanguageOpts = MarkdownOpts() @@ -284,6 +289,7 @@ type SectionOpts struct { Operations []TemplateOpts `mapstructure:"operations"` OperationGroups []TemplateOpts `mapstructure:"operation_groups"` Models []TemplateOpts `mapstructure:"models"` + PostModels []TemplateOpts `mapstructure:"post_models"` } // GenOptsCommon the options for the generator @@ -344,6 +350,7 @@ type GenOptsCommon struct { AllowEnumCI bool StrictResponders bool AcceptDefinitionsOnly bool + WantsRootedErrorPath bool templates *Repository // a shallow clone of the global template repository } @@ -356,7 +363,7 @@ func (g *GenOpts) CheckOpts() error { if !filepath.IsAbs(g.Target) { if _, err := filepath.Abs(g.Target); err != nil { - return fmt.Errorf("could not locate target %s: %v", g.Target, err) + return fmt.Errorf("could not locate target %s: %w", g.Target, err) } } @@ -602,11 +609,11 @@ func (g *GenOpts) render(t *TemplateOpts, data interface{}) ([]byte, error) { } content, err := os.ReadFile(templateFile) if err != nil { - return nil, fmt.Errorf("error while opening %s template file: %v", templateFile, err) + return nil, fmt.Errorf("error while opening %s template file: %w", templateFile, err) } tt, err := template.New(t.Source).Funcs(FuncMapFunc(g.LanguageOpts)).Parse(string(content)) if err != nil { - return nil, fmt.Errorf("template parsing failed on template %s: %v", t.Name, err) + return nil, fmt.Errorf("template parsing failed on template %s: %w", t.Name, err) } templ = tt } @@ -617,7 +624,7 @@ func (g *GenOpts) render(t *TemplateOpts, data interface{}) ([]byte, error) { var tBuf bytes.Buffer if err := templ.Execute(&tBuf, data); err != nil { - return nil, fmt.Errorf("template execution failed for template %s: %v", t.Name, err) + return nil, fmt.Errorf("template execution failed for template %s: %w", t.Name, err) } log.Printf("executed template %s", t.Source) @@ -631,7 +638,7 @@ func (g *GenOpts) render(t *TemplateOpts, data interface{}) ([]byte, error) { func (g *GenOpts) write(t *TemplateOpts, data interface{}) error { dir, fname, err := g.location(t, data) if err != nil { - return fmt.Errorf("failed to resolve template location for template %s: %v", t.Name, err) + return fmt.Errorf("failed to resolve template location for template %s: %w", t.Name, err) } if t.SkipExists && fileExists(dir, fname) { @@ -643,7 +650,7 @@ func (g *GenOpts) write(t *TemplateOpts, data interface{}) error { log.Printf("creating generated file %q in %q as %s", fname, dir, t.Name) content, err := g.render(t, data) if err != nil { - return fmt.Errorf("failed rendering 
template data for %s: %v", t.Name, err) + return fmt.Errorf("failed rendering template data for %s: %w", t.Name, err) } if dir != "" { @@ -652,7 +659,7 @@ func (g *GenOpts) write(t *TemplateOpts, data interface{}) error { debugLog("creating directory %q for \"%s\"", dir, t.Name) // Directory settings consistent with file privileges. // Environment's umask may alter this setup - if e := os.MkdirAll(dir, 0755); e != nil { + if e := os.MkdirAll(dir, 0o755); e != nil { return e } } @@ -666,18 +673,18 @@ func (g *GenOpts) write(t *TemplateOpts, data interface{}) error { formatted, err = g.LanguageOpts.FormatContent(filepath.Join(dir, fname), content) if err != nil { log.Printf("source formatting failed on template-generated source (%q for %s). Check that your template produces valid code", filepath.Join(dir, fname), t.Name) - writeerr = os.WriteFile(filepath.Join(dir, fname), content, 0644) // #nosec + writeerr = os.WriteFile(filepath.Join(dir, fname), content, 0o644) // #nosec if writeerr != nil { - return fmt.Errorf("failed to write (unformatted) file %q in %q: %v", fname, dir, writeerr) + return fmt.Errorf("failed to write (unformatted) file %q in %q: %w", fname, dir, writeerr) } log.Printf("unformatted generated source %q has been dumped for template debugging purposes. DO NOT build on this source!", fname) - return fmt.Errorf("source formatting on generated source %q failed: %v", t.Name, err) + return fmt.Errorf("source formatting on generated source %q failed: %w", t.Name, err) } } - writeerr = os.WriteFile(filepath.Join(dir, fname), formatted, 0644) // #nosec + writeerr = os.WriteFile(filepath.Join(dir, fname), formatted, 0o644) // #nosec if writeerr != nil { - return fmt.Errorf("failed to write file %q in %q: %v", fname, dir, writeerr) + return fmt.Errorf("failed to write file %q in %q: %w", fname, dir, writeerr) } return err } @@ -713,6 +720,20 @@ func (g *GenOpts) renderApplication(app *GenApp) error { return err } } + + if len(g.Sections.PostModels) > 0 { + log.Printf("post-rendering from %d models", len(app.Models)) + for _, templateToPin := range g.Sections.PostModels { + templateConfig := templateToPin + for _, modelToPin := range app.Models { + modelData := modelToPin + if err := g.write(&templateConfig, modelData); err != nil { + return err + } + } + } + } + return nil } @@ -1069,7 +1090,7 @@ func dumpData(data interface{}) error { if err != nil { return err } - fmt.Fprintln(os.Stdout, string(bb)) + fmt.Fprintln(os.Stdout, string(bb)) // TODO(fred): not testable return nil } diff --git a/vendor/github.com/go-swagger/go-swagger/generator/spec.go b/vendor/github.com/go-swagger/go-swagger/generator/spec.go index e7399bb95..df3528a62 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/spec.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/spec.go @@ -41,8 +41,11 @@ func (g *GenOpts) validateAndFlattenSpec() (*loads.Document, error) { if validationErrors != nil { str := fmt.Sprintf("The swagger spec at %q is invalid against swagger specification %s. 
see errors :\n", g.Spec, specDoc.Version()) - for _, desc := range validationErrors.(*swaggererrors.CompositeError).Errors { - str += fmt.Sprintf("- %s\n", desc) + var cerr *swaggererrors.CompositeError + if errors.As(validationErrors, &cerr) { + for _, desc := range cerr.Errors { + str += fmt.Sprintf("- %s\n", desc) + } } return nil, errors.New(str) } @@ -84,6 +87,16 @@ func (g *GenOpts) validateAndFlattenSpec() (*loads.Document, error) { return nil, err } + if g.FlattenOpts.Expand { + // for a similar reason as the one mentioned above for validate, + // schema expansion alters the internal doc cache in the spec. + // This nasty bug (in spec expander) affects circular references. + // So we need to reload the spec from a clone. + // Notice that since the spec inside the document has been modified, we should + // ensure that Pristine refreshes its row root document. + specDoc = specDoc.Pristine() + } + // yields the preprocessed spec document return specDoc, nil } @@ -229,7 +242,7 @@ func WithAutoXOrder(specPath string) string { } tmpFile := filepath.Join(tmpDir, filepath.Base(specPath)) - if err := os.WriteFile(tmpFile, out, 0600); err != nil { + if err := os.WriteFile(tmpFile, out, 0o600); err != nil { panic(err) } return tmpFile diff --git a/vendor/github.com/go-swagger/go-swagger/generator/structs.go b/vendor/github.com/go-swagger/go-swagger/generator/structs.go index 522be1446..145228881 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/structs.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/structs.go @@ -20,6 +20,7 @@ type GenCommon struct { Copyright string TargetImportPath string + RootedErrorPath bool // wants array and map types to have a path corresponding to their type in reported errors } // GenDefinition contains all the properties to generate a @@ -85,6 +86,8 @@ type GenSchema struct { HasBaseType bool IsSubType bool IsExported bool + IsElem bool // IsElem gives some context when the schema is part of an array or a map + IsProperty bool // IsProperty gives some context when the schema is a property of an object DiscriminatorField string DiscriminatorValue string Discriminates map[string]string @@ -96,6 +99,7 @@ type GenSchema struct { StructTags []string ExtraImports map[string]string // non-standard imports detected when using external types ExternalDocs *spec.ExternalDocumentation + WantsRootedErrorPath bool } func (g GenSchema) renderMarshalTag() string { @@ -361,6 +365,8 @@ type GenParameter struct { CollectionFormat string + CustomTag string + Child *GenItems Parent *GenItems @@ -514,6 +520,8 @@ type GenOperationGroup struct { RootPackage string GenOpts *GenOpts PackageAlias string + + ClientOptions *GenClientOptions } // GenOperationGroups is a sorted collection of operation groups @@ -801,3 +809,10 @@ type GenSecurityScope struct { func (g GenSecurityRequirements) Len() int { return len(g) } func (g GenSecurityRequirements) Swap(i, j int) { g[i], g[j] = g[j], g[i] } func (g GenSecurityRequirements) Less(i, j int) bool { return g[i].Name < g[j].Name } + +// GenClientOptions holds extra pieces of information +// to generate a client. 
+type GenClientOptions struct { + ProducesMediaTypes []string // filled with all producers if any method as more than 1 + ConsumesMediaTypes []string // filled with all consumers if any method as more than 1 +} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/support.go b/vendor/github.com/go-swagger/go-swagger/generator/support.go index df3996df4..3794ee1a3 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/support.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/support.go @@ -58,6 +58,9 @@ func GenerateMarkdown(output string, modelNames, operationIDs []string, opts *Ge if err := opts.EnsureDefaults(); err != nil { return err } + if opts.Target != "" && opts.Target != "." { + output = filepath.Join(opts.Target, output) + } MarkdownSectionOpts(opts, output) generator, err := newAppGenerator("", modelNames, operationIDs, opts) @@ -184,7 +187,7 @@ func (a *appGenerator) Generate() error { } // optional OperationGroups templates generation if err := a.GenOpts.renderOperationGroup(&opg); err != nil { - return fmt.Errorf("error while rendering operation group: %v", err) + return fmt.Errorf("error while rendering operation group: %w", err) } } } @@ -217,11 +220,13 @@ func (a *appGenerator) GenerateSupport(ap *GenApp) error { app.DefaultImports[pkgAlias] = serverPath app.ServerPackageAlias = pkgAlias - // add client import for cli generation - clientPath := path.Join(baseImport, - a.GenOpts.LanguageOpts.ManglePackagePath(a.ClientPackage, defaultClientTarget)) - clientPkgAlias := importAlias(clientPath) - app.DefaultImports[clientPkgAlias] = clientPath + if a.GenOpts.IncludeCLi { // no need to add this import when there is no CLI + // add client import for cli generation + clientPath := path.Join(baseImport, + a.GenOpts.LanguageOpts.ManglePackagePath(a.ClientPackage, defaultClientTarget)) + clientPkgAlias := importAlias(clientPath) + app.DefaultImports[clientPkgAlias] = clientPath + } return a.GenOpts.renderApplication(app) } @@ -262,9 +267,11 @@ func (a *appGenerator) makeCodegenApp() (GenApp, error) { imports := make(map[string]string, 50) alias := deconflictPkg(a.GenOpts.LanguageOpts.ManglePackageName(a.OperationsPackage, defaultOperationsTarget), renameAPIPackage) - imports[alias] = path.Join( - baseImport, - a.GenOpts.LanguageOpts.ManglePackagePath(a.OperationsPackage, defaultOperationsTarget)) + if !a.GenOpts.IsClient { // we don't want to inject this import for clients + imports[alias] = path.Join( + baseImport, + a.GenOpts.LanguageOpts.ManglePackagePath(a.OperationsPackage, defaultOperationsTarget)) + } implAlias := "" if a.GenOpts.ImplementationPackage != "" { @@ -284,7 +291,7 @@ func (a *appGenerator) makeCodegenApp() (GenApp, error) { a.GenOpts, ) if err != nil { - return GenApp{}, fmt.Errorf("error in model %s while planning definitions: %v", mn, err) + return GenApp{}, fmt.Errorf("error in model %s while planning definitions: %w", mn, err) } if model != nil { if !model.External { @@ -304,6 +311,10 @@ func (a *appGenerator) makeCodegenApp() (GenApp, error) { log.Printf("planning operations (found: %d)", len(a.Operations)) genOps := make(GenOperations, 0, len(a.Operations)) + consumesIndex := make(map[string][]string) + producesIndex := make(map[string][]string) + pristineDoc := a.SpecDoc.Pristine() + for operationName, opp := range a.Operations { o := opp.Op o.ID = operationName @@ -316,6 +327,7 @@ func (a *appGenerator) makeCodegenApp() (GenApp, error) { Imports: imports, DefaultScheme: a.DefaultScheme, Doc: a.SpecDoc, + PristineDefs: pristineDoc, 
Analyzed: a.Analyzed, BasePath: a.SpecDoc.BasePath(), GenOpts: a.GenOpts, @@ -355,7 +367,18 @@ func (a *appGenerator) makeCodegenApp() (GenApp, error) { op.ReceiverName = receiver op.Tags = tags // ordered tags for this operation, possibly filtered by CLI params - genOps = append(genOps, op) + + allConsumes := pruneEmpty(op.ConsumesMediaTypes) + if bldr.DefaultConsumes != "" { + allConsumes = append(allConsumes, bldr.DefaultConsumes) + } + consumesIndex[bldr.Name] = allConsumes + + allProduces := pruneEmpty(op.ProducesMediaTypes) + if bldr.DefaultProduces != "" { + allProduces = append(allProduces, bldr.DefaultProduces) + } + producesIndex[bldr.Name] = allProduces if !a.GenOpts.SkipTagPackages && tag != "" { importPath := filepath.ToSlash( @@ -364,8 +387,19 @@ func (a *appGenerator) makeCodegenApp() (GenApp, error) { a.GenOpts.LanguageOpts.ManglePackagePath(a.OperationsPackage, defaultOperationsTarget), a.GenOpts.LanguageOpts.ManglePackageName(bldr.APIPackage, defaultOperationsTarget), )) + + // check for possible conflicts that requires import aliasing + pth, aliasUsed := defaultImports[bldr.APIPackageAlias] + if (a.GenOpts.IsClient && bldr.APIPackageAlias == a.GenOpts.ClientPackage) || // we don't want import to shadow the current package + (a.GenOpts.IncludeCLi && bldr.APIPackageAlias == a.GenOpts.CliPackage) || + (aliasUsed && pth != importPath) { // was already imported with a different target + op.PackageAlias = renameOperationPackage(tags, bldr.APIPackageAlias) + bldr.APIPackageAlias = op.PackageAlias + } defaultImports[bldr.APIPackageAlias] = importPath } + + genOps = append(genOps, op) } sort.Sort(genOps) @@ -378,8 +412,12 @@ func (a *appGenerator) makeCodegenApp() (GenApp, error) { opGroups := make(GenOperationGroups, 0, len(opsGroupedByPackage)) for k, v := range opsGroupedByPackage { - log.Printf("operations for package packages %q (found: %d)", k, len(v)) + log.Printf("operations for package %q (found: %d)", k, len(v)) sort.Sort(v) + + consumesInGroup := make([]string, 0, 2) + producesInGroup := make([]string, 0, 2) + // trim duplicate extra schemas within the same package vv := make(GenOperations, 0, len(v)) seenExtraSchema := make(map[string]bool) @@ -393,6 +431,9 @@ func (a *appGenerator) makeCodegenApp() (GenApp, error) { } op.ExtraSchemas = uniqueExtraSchemas vv = append(vv, op) + + consumesInGroup = concatUnique(consumesInGroup, consumesIndex[op.Name]) + producesInGroup = concatUnique(producesInGroup, producesIndex[op.Name]) } var pkg string if len(vv) > 0 { @@ -414,6 +455,19 @@ func (a *appGenerator) makeCodegenApp() (GenApp, error) { RootPackage: a.APIPackage, GenOpts: a.GenOpts, } + + if a.GenOpts.IsClient { + // generating extra options to switch media type in client + if len(consumesInGroup) > 1 || len(producesInGroup) > 1 { + sort.Strings(producesInGroup) + sort.Strings(consumesInGroup) + options := &GenClientOptions{ + ProducesMediaTypes: producesInGroup, + ConsumesMediaTypes: consumesInGroup, + } + opGroup.ClientOptions = options + } + } opGroups = append(opGroups, opGroup) } sort.Sort(opGroups) diff --git a/vendor/github.com/go-swagger/go-swagger/generator/template_repo.go b/vendor/github.com/go-swagger/go-swagger/generator/template_repo.go index e78ae602a..2c377372d 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/template_repo.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/template_repo.go @@ -4,6 +4,7 @@ "bytes" "encoding/json" "fmt" + "log" "math" "os" "path" @@ -16,8 +17,6 @@ "text/template/parse" "unicode" - "log" - 
"github.com/Masterminds/sprig/v3" "github.com/go-openapi/inflect" "github.com/go-openapi/runtime" @@ -94,6 +93,7 @@ func DefaultFuncMap(lang *LanguageOpts) template.FuncMap { "inspect": pretty.Sprint, "cleanPath": path.Clean, "mediaTypeName": mediaMime, + "mediaGoName": mediaGoName, "arrayInitializer": lang.arrayInitializer, "hasPrefix": strings.HasPrefix, "stringContains": strings.Contains, @@ -134,9 +134,55 @@ func DefaultFuncMap(lang *LanguageOpts) template.FuncMap { }, "docCollectionFormat": resolvedDocCollectionFormat, "trimSpace": strings.TrimSpace, + "mdBlock": markdownBlock, // markdown block "httpStatus": httpStatus, "cleanupEnumVariant": cleanupEnumVariant, "gt0": gt0, + "path": errorPath, + "cmdName": func(in interface{}) (string, error) { + // builds the name of a CLI command for a single operation + op, isOperation := in.(GenOperation) + if !isOperation { + ptr, ok := in.(*GenOperation) + if !ok { + return "", fmt.Errorf("cmdName should be called on a GenOperation, but got: %T", in) + } + op = *ptr + } + name := "Operation" + pascalize(op.Package) + pascalize(op.Name) + "Cmd" + + return name, nil // TODO + }, + "cmdGroupName": func(in interface{}) (string, error) { + // builds the name of a group of CLI commands + opGroup, ok := in.(GenOperationGroup) + if !ok { + return "", fmt.Errorf("cmdGroupName should be called on a GenOperationGroup, but got: %T", in) + } + name := "GroupOfOperations" + pascalize(opGroup.Name) + "Cmd" + + return name, nil // TODO + }, + "flagNameVar": func(in string) string { + // builds a flag name variable in CLI commands + return fmt.Sprintf("flag%sName", pascalize(in)) + }, + "flagValueVar": func(in string) string { + // builds a flag value variable in CLI commands + return fmt.Sprintf("flag%sValue", pascalize(in)) + }, + "flagDefaultVar": func(in string) string { + // builds a flag default value variable in CLI commands + return fmt.Sprintf("flag%sDefault", pascalize(in)) + }, + "flagModelVar": func(in string) string { + // builds a flag model variable in CLI commands + return fmt.Sprintf("flag%sModel", pascalize(in)) + }, + "flagDescriptionVar": func(in string) string { + // builds a flag description variable in CLI commands + return fmt.Sprintf("flag%sDescription", pascalize(in)) + }, } for k, v := range extra { @@ -327,7 +373,6 @@ funcs: t.funcs, // LoadDefaults will load the embedded templates func (t *Repository) LoadDefaults() { - for name, asset := range assets { if err := t.addFile(name, string(asset), true); err != nil { log.Fatal(err) @@ -337,26 +382,27 @@ func (t *Repository) LoadDefaults() { // LoadDir will walk the specified path and add each .gotmpl file it finds to the repository func (t *Repository) LoadDir(templatePath string) error { - err := filepath.Walk(templatePath, func(path string, info os.FileInfo, err error) error { - + err := filepath.Walk(templatePath, func(path string, _ os.FileInfo, err error) error { if strings.HasSuffix(path, ".gotmpl") { if assetName, e := filepath.Rel(templatePath, path); e == nil { if data, e := os.ReadFile(path); e == nil { if ee := t.AddFile(assetName, string(data)); ee != nil { - return fmt.Errorf("could not add template: %v", ee) + return fmt.Errorf("could not add template: %w", ee) } } // Non-readable files are skipped } } + if err != nil { return err } + // Non-template files are skipped return nil }) if err != nil { - return fmt.Errorf("could not complete template processing in directory \"%s\": %v", templatePath, err) + return fmt.Errorf("could not complete template processing in directory 
\"%s\": %w", templatePath, err) } return nil } @@ -392,9 +438,8 @@ func (t *Repository) addFile(name, data string, allowOverride bool) error { name = swag.ToJSONName(strings.TrimSuffix(name, ".gotmpl")) templ, err := template.New(name).Funcs(t.funcs).Parse(data) - if err != nil { - return fmt.Errorf("failed to load template %s: %v", name, err) + return fmt.Errorf("failed to load template %s: %w", name, err) } // check if any protected templates are defined @@ -441,7 +486,6 @@ func (t *Repository) SetAllowOverride(value bool) { } func findDependencies(n parse.Node) []string { - var deps []string depMap := make(map[string]bool) @@ -491,7 +535,6 @@ func findDependencies(n parse.Node) []string { } return deps - } func (t *Repository) flattenDependencies(templ *template.Template, dependencies map[string]bool) map[string]bool { @@ -516,11 +559,9 @@ func (t *Repository) flattenDependencies(templ *template.Template, dependencies } return dependencies - } func (t *Repository) addDependencies(templ *template.Template) (*template.Template, error) { - name := templ.Name() deps := t.flattenDependencies(templ, nil) @@ -545,9 +586,8 @@ func (t *Repository) addDependencies(templ *template.Template) (*template.Templa // Add it to the parse tree templ, err = templ.AddParseTree(dep, tt.Tree) - if err != nil { - return templ, fmt.Errorf("dependency error: %v", err) + return templ, fmt.Errorf("dependency error: %w", err) } } @@ -576,7 +616,6 @@ func (t *Repository) DumpTemplates() { fmt.Fprintf(buf, "Defined in `%s`\n", t.files[name]) if deps := findDependencies(templ.Tree.Root); len(deps) > 0 { - fmt.Fprintf(buf, "####requires \n - %v\n\n\n", strings.Join(deps, "\n - ")) } fmt.Fprintln(buf, "\n---") @@ -853,3 +892,99 @@ func gt0(in *int64) bool { // with a pointer return in != nil && *in > 0 } + +func errorPath(in interface{}) (string, error) { + // For schemas: + // errorPath returns an empty string litteral when the schema path is empty. + // It provides a shorthand for template statements such as: + // {{ if .Path }}{{ .Path }}{{ else }}" "{{ end }}, + // which becomes {{ path . }} + // + // When called for a GenParameter, GenResponse or GenOperation object, it just + // returns Path. + // + // Extra behavior for schemas, when the generation option RootedErroPath is enabled: + // In the case of arrays with an empty path, it adds the type name as the path "root", + // so consumers of reported errors get an idea of the originator. 
+ + var pth string + rooted := func(schema GenSchema) string { + if schema.WantsRootedErrorPath && schema.Path == "" && (schema.IsArray || schema.IsMap) { + return `"[` + schema.Name + `]"` + } + + return schema.Path + } + + switch schema := in.(type) { + case GenSchema: + pth = rooted(schema) + case *GenSchema: + if schema == nil { + break + } + pth = rooted(*schema) + case GenDefinition: + pth = rooted(schema.GenSchema) + case *GenDefinition: + if schema == nil { + break + } + pth = rooted(schema.GenSchema) + case GenParameter: + pth = schema.Path + + // unchanged Path if called with other types + case *GenParameter: + if schema == nil { + break + } + pth = schema.Path + case GenResponse: + pth = schema.Path + case *GenResponse: + if schema == nil { + break + } + pth = schema.Path + case GenOperation: + pth = schema.Path + case *GenOperation: + if schema == nil { + break + } + pth = schema.Path + case GenItems: + pth = schema.Path + case *GenItems: + if schema == nil { + break + } + pth = schema.Path + case GenHeader: + pth = schema.Path + case *GenHeader: + if schema == nil { + break + } + pth = schema.Path + default: + return "", fmt.Errorf("errorPath should be called with GenSchema or GenDefinition, but got %T", schema) + } + + if pth == "" { + return `""`, nil + } + + return pth, nil +} + +const mdNewLine = "
" + +var mdNewLineReplacer = strings.NewReplacer("\r\n", mdNewLine, "\n", mdNewLine, "\r", mdNewLine) + +func markdownBlock(in string) string { + in = strings.TrimSpace(in) + + return mdNewLineReplacer.Replace(in) +} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/cli.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/cli.gotmpl index 3d88c5beb..073e33cfa 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/cli.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/cli.gotmpl @@ -1,42 +1,45 @@ // Code generated by go-swagger; DO NOT EDIT. - {{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }} - package {{ .GenOpts.CliPackage }} // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( - {{ imports .DefaultImports }} - {{ imports .Imports }} + "log" + "os" + "path" + "path/filepath" - "github.com/spf13/cobra" - "github.com/spf13/viper" - "github.com/go-openapi/runtime" - "github.com/go-openapi/swag" - httptransport "github.com/go-openapi/runtime/client" - homedir "github.com/mitchellh/go-homedir" + httptransport "github.com/go-openapi/runtime/client" + "github.com/spf13/cobra" + "github.com/spf13/viper" + {{ imports .DefaultImports }} + {{ imports .Imports }} ) -// debug flag indicating that cli should output debug logs -var debug bool -// config file location -var configFile string -// dry run flag -var dryRun bool +var ( + // debug flag indicating that cli should output debug logs + debug bool -// name of the executable -var exeName string = filepath.Base(os.Args[0]) + // config file location + configFile string + + // dry run flag + dryRun bool + + // name of the executable + exeName = filepath.Base(os.Args[0]) +) // logDebugf writes debug log to stdout func logDebugf(format string, v ...interface{}) { - if !debug{ - return - } - log.Printf(format, v...) + if !debug{ + return + } + log.Printf(format, v...) } {{/*TODO: make this a swagger cli option*/}} @@ -44,199 +47,240 @@ func logDebugf(format string, v ...interface{}) { var maxDepth int = 5 // makeClient constructs a client object -func makeClient(cmd *cobra.Command, args []string) (*client.{{ pascalize .Name }}, error) { - hostname := viper.GetString("hostname") - viper.SetDefault("base_path", client.DefaultBasePath) - basePath := viper.GetString("base_path") - scheme := viper.GetString("scheme") +func makeClient(cmd *cobra.Command, _ []string) (*client.{{ pascalize .Name }}, error) { + hostname := viper.GetString("hostname") + viper.SetDefault("base_path", client.DefaultBasePath) + basePath := viper.GetString("base_path") + scheme := viper.GetString("scheme") - r := httptransport.New(hostname, basePath, []string{scheme}) - r.SetDebug(debug) + r := httptransport.New(hostname, basePath, []string{scheme}) + r.SetDebug(debug) - {{- /* user might define custom mediatype xxx/json and there is no registered ones to handle. 
*/}} - // set custom producer and consumer to use the default ones - {{ range .Consumes }} - {{ range .AllSerializers }} - {{- if stringContains .MediaType "json" }} - r.Consumers["{{ .MediaType }}"] = runtime.JSONConsumer() - {{- else }} - // warning: consumes {{ .MediaType }} is not supported by go-swagger cli yet - {{- end }} - {{- end }} - {{ end }} - {{ range .Produces }} - {{- range .AllSerializers }} - {{- if stringContains .MediaType "json" }} - r.Producers["{{ .MediaType }}"] = runtime.JSONProducer() - {{- else }} - // warning: produces {{ .MediaType }} is not supported by go-swagger cli yet - {{- end }} - {{- end }} - {{ end }} + {{- /* user might define custom mediatype xxx/json and there is no registered ones to handle. */}} + // set custom producer and consumer to use the default ones + {{ range .Consumes }} + {{ range .AllSerializers }} + {{- if stringContains .MediaType "json" }} + r.Consumers["{{ .MediaType }}"] = runtime.JSONConsumer() + {{- else }} + // warning: consumes {{ .MediaType }} is not supported by go-swagger cli yet + {{- end }} + {{- end }} + {{ end }} + {{ range .Produces }} + {{- range .AllSerializers }} + {{- if stringContains .MediaType "json" }} + r.Producers["{{ .MediaType }}"] = runtime.JSONProducer() + {{- else }} + // warning: produces {{ .MediaType }} is not supported by go-swagger cli yet + {{- end }} + {{- end }} + {{- end }} - {{- if .SecurityDefinitions }} - auth, err := makeAuthInfoWriter(cmd) - if err != nil { - return nil, err - } - r.DefaultAuthentication = auth - {{ end }} - appCli := client.New(r, strfmt.Default) - logDebugf("Server url: %v://%v", scheme, hostname) - return appCli, nil + {{- if .SecurityDefinitions }} + + auth, err := makeAuthInfoWriter(cmd) + if err != nil { + return nil, err + } + r.DefaultAuthentication = auth + {{- end }} + + appCli := client.New(r, strfmt.Default) + logDebugf("Server url: %v://%v", scheme, hostname) + + return appCli, nil } // MakeRootCmd returns the root cmd func MakeRootCmd() (*cobra.Command, error) { - cobra.OnInitialize(initViperConfigs) - - // Use executable name as the command name - rootCmd := &cobra.Command{ - Use: exeName, - } - {{/*note: viper binded flag value must be retrieved from viper rather than cmd*/}} - // register basic flags - rootCmd.PersistentFlags().String("hostname", client.DefaultHost, "hostname of the service") - viper.BindPFlag("hostname", rootCmd.PersistentFlags().Lookup("hostname")) - rootCmd.PersistentFlags().String("scheme", client.DefaultSchemes[0], fmt.Sprintf("Choose from: %v", client.DefaultSchemes)) - viper.BindPFlag("scheme", rootCmd.PersistentFlags().Lookup("scheme")) - rootCmd.PersistentFlags().String("base-path", client.DefaultBasePath, fmt.Sprintf("For example: %v", client.DefaultBasePath)) - viper.BindPFlag("base_path", rootCmd.PersistentFlags().Lookup("base-path")) + cobra.OnInitialize(initViperConfigs) - // configure debug flag - rootCmd.PersistentFlags().BoolVar(&debug, "debug", false, "output debug logs") - // configure config location - rootCmd.PersistentFlags().StringVar(&configFile, "config", "", "config file path") - // configure dry run flag - rootCmd.PersistentFlags().BoolVar(&dryRun, "dry-run", false, "do not send the request to server") + // Use executable name as the command name + rootCmd := &cobra.Command{ + Use: exeName, + } + {{/*note: viper binded flag value must be retrieved from viper rather than cmd*/}} + // register basic flags + rootCmd.PersistentFlags().String("hostname", client.DefaultHost, "hostname of the service") + if err := 
viper.BindPFlag("hostname", rootCmd.PersistentFlags().Lookup("hostname")) ; err != nil { + return nil, err + } + rootCmd.PersistentFlags().String("scheme", client.DefaultSchemes[0], fmt.Sprintf("Choose from: %v", client.DefaultSchemes)) + if err := viper.BindPFlag("scheme", rootCmd.PersistentFlags().Lookup("scheme")) ; err != nil { + return nil, err + } + rootCmd.PersistentFlags().String("base-path", client.DefaultBasePath, fmt.Sprintf("For example: %v", client.DefaultBasePath)) + if err := viper.BindPFlag("base_path", rootCmd.PersistentFlags().Lookup("base-path")) ; err != nil { + return nil, err + } - // register security flags - {{- if .SecurityDefinitions }} - if err := registerAuthInoWriterFlags(rootCmd); err != nil{ - return nil, err - } - {{- end }} - // add all operation groups -{{- range .OperationGroups -}} - {{- $operationGroupCmdVarName := printf "operationGroup%vCmd" (pascalize .Name) }} - {{ $operationGroupCmdVarName }}, err := makeOperationGroup{{ pascalize .Name }}Cmd() - if err != nil { - return nil, err - } - rootCmd.AddCommand({{ $operationGroupCmdVarName }}) -{{ end }} + // configure debug flag + rootCmd.PersistentFlags().BoolVar(&debug, "debug", false, "output debug logs") + // configure config location + rootCmd.PersistentFlags().StringVar(&configFile, "config", "", "config file path") + // configure dry run flag + rootCmd.PersistentFlags().BoolVar(&dryRun, "dry-run", false, "do not send the request to server") - // add cobra completion - rootCmd.AddCommand(makeGenCompletionCmd()) + // register security flags + {{- if .SecurityDefinitions }} + if err := registerAuthInoWriterFlags(rootCmd); err != nil{ + return nil, err + } + {{- end }} - return rootCmd, nil + // add all operation groups +{{- range $index,$element := .OperationGroups }} + c{{ $index }}, err := make{{ cmdGroupName $element }}() + if err != nil { + return nil, err + } + rootCmd.AddCommand(c{{ $index}}) +{{- end }} + + // add cobra completion + rootCmd.AddCommand(makeGenCompletionCmd()) + + return rootCmd, nil } // initViperConfigs initialize viper config using config file in '$HOME/.config//config.' // currently hostname, scheme and auth tokens can be specified in this config file. func initViperConfigs() { - if configFile != "" { - // use user specified config file location - viper.SetConfigFile(configFile) - }else{ - // look for default config - // Find home directory. - home, err := homedir.Dir() - cobra.CheckErr(err) + if configFile != "" { + // use user specified config file location + viper.SetConfigFile(configFile) + } else{ + var ( + configDir string + err error + ) - // Search config in home directory with name ".cobra" (without extension). - viper.AddConfigPath(path.Join(home, ".config", exeName)) - viper.SetConfigName("config") - } + // look for default config (OS-specific, e.g. ".config" on linux) + configDir, err = os.UserConfigDir() + if err != nil { + // fallback and try finding the home directory. + home, err := os.UserHomeDir() + cobra.CheckErr(err) + configDir = path.Join(home, ".config") + } - if err := viper.ReadInConfig(); err != nil { - logDebugf("Error: loading config file: %v", err) - return - } - logDebugf("Using config file: %v", viper.ConfigFileUsed()) + // Search config in the config directory with name of the CLI binary (without extension). 
+ configDir = path.Join(configDir, exeName) + viper.AddConfigPath(configDir) + viper.SetConfigName("config") + } + + if err := viper.ReadInConfig(); err != nil { + logDebugf("Error: loading config file: %v", err) + return + } + logDebugf("Using config file: %v", viper.ConfigFileUsed()) } {{- if .SecurityDefinitions }} -{{- /*youyuan: rework this since spec may define multiple auth schemes. - cli needs to detect which one user passed rather than add all of them.*/}} +{{- /*youyuan: rework this since spec may define multiple auth schemes. + cli needs to detect which one user passed rather than add all of them.*/}} + // registerAuthInoWriterFlags registers all flags needed to perform authentication func registerAuthInoWriterFlags(cmd *cobra.Command) error { {{- range .SecurityDefinitions }} - /*{{.Name}} {{.Description}}*/ - {{- if .IsBasicAuth }} - cmd.PersistentFlags().String("username", "", "username for basic auth") - viper.BindPFlag("username", cmd.PersistentFlags().Lookup("username")) - cmd.PersistentFlags().String("password", "", "password for basic auth") - viper.BindPFlag("password", cmd.PersistentFlags().Lookup("password")) - {{- end }} - {{- if .IsAPIKeyAuth }} - cmd.PersistentFlags().String("{{.Name}}", "", `{{.Description}}`) - viper.BindPFlag("{{.Name}}", cmd.PersistentFlags().Lookup("{{.Name}}")) - {{- end }} - {{- if .IsOAuth2 }} - // oauth2: let user provide the token in a flag, rather than implement the logic to fetch the token. - cmd.PersistentFlags().String("oauth2-token", "", `{{.Description}}`) - viper.BindPFlag("oauth2-token", cmd.PersistentFlags().Lookup("oauth2-token")) - {{- end }} -{{- end }} - return nil + // {{.Name}} + {{- if .Description }} + {{- comment .Description }} + {{- end }} + {{- if .IsBasicAuth }} + cmd.PersistentFlags().String("username", "", "username for basic auth") + if err := viper.BindPFlag("username", cmd.PersistentFlags().Lookup("username")) ; err != nil { + return err + } + cmd.PersistentFlags().String("password", "", "password for basic auth") + if err := viper.BindPFlag("password", cmd.PersistentFlags().Lookup("password")) ; err != nil { + return err + } + {{- end }} + {{- if .IsAPIKeyAuth }} + cmd.PersistentFlags().String("{{.Name}}", "", `{{.Description}}`) + if err := viper.BindPFlag("{{.Name}}", cmd.PersistentFlags().Lookup("{{.Name}}")) ; err != nil { + return err + } + {{- end }} + {{- if .IsOAuth2 }} + // oauth2: let user provide the token in a flag, rather than implement the logic to fetch the token. 
+ cmd.PersistentFlags().String("oauth2-token", "", `{{.Description}}`) + if err := viper.BindPFlag("oauth2-token", cmd.PersistentFlags().Lookup("oauth2-token")) ; err != nil { + return err + } + {{- end }} +{{ end }} + + return nil } // makeAuthInfoWriter retrieves cmd flags and construct an auth info writer func makeAuthInfoWriter(cmd *cobra.Command) (runtime.ClientAuthInfoWriter, error) { - auths := []runtime.ClientAuthInfoWriter{} + auths := []runtime.ClientAuthInfoWriter{} {{- range .SecurityDefinitions }} - /*{{.Name}} {{.Description}}*/ - {{- if .IsBasicAuth }} - if viper.IsSet("username") { - usr := viper.GetString("username") - if !viper.IsSet("password"){ - return nil, fmt.Errorf("Basic Auth password for user [%v] is not provided.", usr) - } - pwd := viper.GetString("password") - auths = append(auths, httptransport.BasicAuth(usr,pwd)) + + // {{.Name}} + {{- if .Description }} + {{- comment .Description }} + {{- end }} + {{- if .IsBasicAuth }} + if viper.IsSet("username") { + usr := viper.GetString("username") + if !viper.IsSet("password"){ + return nil, fmt.Errorf("Basic Auth password for user [%v] is not provided.", usr) } - {{- end }} - {{- if .IsAPIKeyAuth }} - if viper.IsSet("{{.Name}}") { - {{ pascalize .Name }}Key := viper.GetString("{{.Name}}") - auths = append(auths, httptransport.APIKeyAuth("{{.Name}}", "{{.In}}", {{ pascalize .Name }}Key)) - } - {{- end }} - {{- if .IsOAuth2 }} - if viper.IsSet("oauth2-token") { - // oauth2 workflow for generated CLI is not ideal. - // If you have suggestions on how to support it, raise an issue here: https://github.com/go-swagger/go-swagger/issues - // This will be added to header: "Authorization: Bearer {oauth2-token value}" - token := viper.GetString("oauth2-token") - auths = append(auths, httptransport.BearerToken(token)) - } - {{- end }} + pwd := viper.GetString("password") + auths = append(auths, httptransport.BasicAuth(usr,pwd)) + } + {{- end }} + {{- if .IsAPIKeyAuth }} + if viper.IsSet("{{.Name}}") { + {{ pascalize .Name }}Key := viper.GetString("{{.Name}}") + auths = append(auths, httptransport.APIKeyAuth("{{.Name}}", "{{.In}}", {{ pascalize .Name }}Key)) + } + {{- end }} + {{- if .IsOAuth2 }} + if viper.IsSet("oauth2-token") { + // oauth2 workflow for generated CLI is not ideal. 
+ // If you have suggestions on how to support it, raise an issue here: https://github.com/go-swagger/go-swagger/issues + // This will be added to header: "Authorization: Bearer {oauth2-token value}" + token := viper.GetString("oauth2-token") + auths = append(auths, httptransport.BearerToken(token)) + } + {{- end }} {{- end }} - if len(auths) == 0 { - logDebugf("Warning: No auth params detected.") - return nil, nil - } - // compose all auths together - return httptransport.Compose(auths...), nil + + if len(auths) == 0 { + logDebugf("Warning: No auth params detected.") + return nil, nil + } + + // compose all auths together + return httptransport.Compose(auths...), nil } {{- end }} -{{ range .OperationGroups -}} -func makeOperationGroup{{ pascalize .Name }}Cmd() (*cobra.Command, error) { - {{- $operationGroupCmdVarName := printf "operationGroup%vCmd" (pascalize .Name) }} - {{ $operationGroupCmdVarName }} := &cobra.Command{ - Use: "{{ .Name }}", - Long: `{{ .Description }}`, - } -{{ range .Operations }} - {{- $operationCmdVarName := printf "operation%vCmd" (pascalize .Name) }} - {{ $operationCmdVarName }}, err := makeOperation{{pascalize .Package}}{{ pascalize .Name }}Cmd() - if err != nil { - return nil, err - } - {{ $operationGroupCmdVarName }}.AddCommand({{ $operationCmdVarName }}) -{{ end }} - return {{ $operationGroupCmdVarName }}, nil +{{- range .OperationGroups -}} + +// make{{ cmdGroupName . }} returns a parent command to handle all operations with tag {{ printf "%q" .Name }} +func make{{ cmdGroupName . }}() (*cobra.Command, error) { + parent := &cobra.Command{ + Use: "{{ .Name }}", + Long: `{{ .Description }}`, + } + + {{- range $index,$element := .Operations }} + + sub{{ $index }}, err := make{{ cmdName $element }}() + if err != nil { + return nil, err + } + parent.AddCommand(sub{{ $index }}) + {{- end }} + + return parent, nil } -{{ end }} {{/*operation group*/}} +{{- end }} {{/*operation group*/}} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/main.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/main.gotmpl index 6cc470a2f..e94e617e5 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/main.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/main.gotmpl @@ -1,28 +1,29 @@ // Code generated by go-swagger; DO NOT EDIT. - {{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }} - package main import ( - "encoding/json" - {{ imports .DefaultImports }} - {{ imports .Imports }} + "fmt" + "os" + + {{ imports .DefaultImports }} + {{ imports .Imports }} ) // This file was generated by the swagger tool. 
// Editing this file might prove futile when you re-run the swagger generate command func main() { - rootCmd,err := cli.MakeRootCmd() + rootCmd, err := cli.MakeRootCmd() if err != nil { - fmt.Println("Cmd construction error: ", err) + fmt.Println("cmd construction error: ", err) os.Exit(1) } - - if err := rootCmd.Execute(); err != nil { + + if err = rootCmd.Execute(); err != nil { + fmt.Println("cmd execute error: ", err) os.Exit(1) } -} \ No newline at end of file +} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/modelcli.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/modelcli.gotmpl index d93e91d41..49b106bf9 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/modelcli.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/modelcli.gotmpl @@ -11,9 +11,12 @@ package cli import ( - {{ imports .DefaultImports }} - {{ imports .Imports }} - "github.com/spf13/cobra" + "encoding/json" + "fmt" + + "github.com/spf13/cobra" + {{ imports .DefaultImports }} + {{ imports .Imports }} ) // Schema cli for {{.GoType}} @@ -22,4 +25,4 @@ import ( {{ range .ExtraSchemas }} // Extra schema cli for {{.GoType}} {{ template "modelschemacli" .}} -{{ end }} \ No newline at end of file +{{ end }} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/operation.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/operation.gotmpl index 10666ed78..dc09dac07 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/operation.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/operation.gotmpl @@ -1,26 +1,26 @@ // Code generated by go-swagger; DO NOT EDIT. - {{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }} -{{- /*TODO: do not hardcode cli pkg*/}} -package cli +package cli {{/* TODO: do not hardcode cli pkg */}} // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( - {{ imports .DefaultImports }} - {{ imports .Imports }} + "fmt" - "github.com/spf13/cobra" - "github.com/go-openapi/runtime" - "github.com/go-openapi/swag" - httptransport "github.com/go-openapi/runtime/client" + {{ imports .DefaultImports }} + {{ imports .Imports }} + + "github.com/spf13/cobra" + "github.com/go-openapi/runtime" + "github.com/go-openapi/swag" + httptransport "github.com/go-openapi/runtime/client" ) -// makeOperation{{pascalize .Package}}{{ pascalize .Name }}Cmd returns a cmd to handle operation {{ camelize .Name }} -func makeOperation{{pascalize .Package}}{{ pascalize .Name }}Cmd() (*cobra.Command, error) { +// make{{ cmdName . }} returns a command to handle operation {{ camelize .Name }} +func make{{ cmdName . 
}}() (*cobra.Command, error) { cmd := &cobra.Command{ Use: "{{ .Name }}", Short: `{{ escapeBackticks .Description}}`, @@ -46,12 +46,11 @@ func runOperation{{pascalize $operationGroup }}{{ pascalize $operation }}(cmd *c // retrieve flag values from cmd and fill params params := {{ .PackageAlias }}.New{{ pascalize .Name}}Params() {{- range .Params }} - if err, _ := retrieveOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{ pascalize .Name }}Flag(params, "", cmd); err != nil{ + if err, _ = retrieveOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{ pascalize .Name }}Flag(params, "", cmd); err != nil{ return err } {{- end }} {{/*Params*/}} - if dryRun { - {{/* Note: dry run is not very useful for now, but useful when validation is added in future*/}} + if dryRun { {{/* Note: dry run is not very useful for now, but useful when validation is added in future*/}} logDebugf("dry-run flag specified. Skip sending request.") return nil } @@ -61,10 +60,11 @@ func runOperation{{pascalize $operationGroup }}{{ pascalize $operation }}(cmd *c if err != nil { return err } - if !debug{ - {{/* In debug mode content should have been printed in transport layer, so do not print again*/}} + + if !debug{ {{/* In debug mode content should have been printed in transport layer, so do not print again*/}} fmt.Println(msgStr) } + return nil } @@ -77,9 +77,9 @@ func registerOperation{{pascalize $operationGroup }}{{ pascalize $operation }}Pa {{- end }} return nil } - {{/*register functions for each fields in this operation*/}} {{- range .Params }} + func registerOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{pascalize .Name }}ParamFlags(cmdPrefix string, cmd *cobra.Command) error{ {{- if .IsPrimitive }} {{ template "primitiveregistrator" . 
}} @@ -96,12 +96,12 @@ func registerOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{ } {{- end }} -{{/*functions to retreive each field of params*/}} +{{/*functions to retrieve each field of params*/}} {{- range .Params }} + func retrieveOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{ pascalize .Name }}Flag(m *{{ $operationPkgAlias }}.{{ pascalize $operation }}Params, cmdPrefix string, cmd *cobra.Command) (error,bool){ retAdded := false {{- $flagStr := .Name }} - {{- $flagValueVar := printf "%vValue" (camelize .Name) }} {{- /*only set the param if user set the flag*/}} if cmd.Flags().Changed("{{ $flagStr }}") { {{- if .IsPrimitive }} @@ -113,16 +113,16 @@ func retrieveOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{ {{- else if and .IsBodyParam .Schema .IsComplexObject (not .IsStream) }} {{- /*schema payload can be passed in cmd as a string and here is unmarshalled to model struct and attached in params*/}} // Read {{ $flagStr }} string from cmd and unmarshal - {{ $flagValueVar }}Str, err := cmd.Flags().GetString("{{ $flagStr }}") + {{ flagValueVar .Name }}Str, err := cmd.Flags().GetString("{{ $flagStr }}") if err != nil { return err, false } {{/*Note anonymous body schema is not pointer*/}} - {{ $flagValueVar }} := {{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}{} - if err := json.Unmarshal([]byte({{ $flagValueVar }}Str), &{{ $flagValueVar }}); err!= nil{ + {{ flagValueVar .Name }} := {{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}{} + if err := json.Unmarshal([]byte({{ flagValueVar .Name }}Str), &{{ flagValueVar .Name }}); err!= nil{ return fmt.Errorf("cannot unmarshal {{ $flagStr }} string in {{.GoType}}: %v", err), false } - m.{{ .ID }} = {{- if .IsNullable }}&{{- end }}{{ $flagValueVar }} + m.{{ .ID }} = {{- if .IsNullable }}&{{- end }}{{ flagValueVar .Name }} {{- else }} // warning: {{.GoType}} is not supported by go-swagger cli yet {{- end }} {{/*end go type case*/}} @@ -131,32 +131,32 @@ func retrieveOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{ {{- /* Add flags to capture fields in Body. If previously Body struct was constructed in unmarshalling body string, then reuse the struct, otherwise construct an empty value struct to fill. Here body field flags overwrites unmarshalled body string values. 
*/}} - {{- $flagModelVar := printf "%vModel" (camelize $flagValueVar) }} - {{ $flagModelVar }} := m.{{ .ID }} - if swag.IsZero({{ $flagModelVar }}){ - {{ $flagModelVar }} = {{- if .IsNullable }}&{{- end }}{{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}{} + {{ flagModelVar .Name }} := m.{{ .ID }} + if swag.IsZero({{ flagModelVar .Name }}){ + {{ flagModelVar .Name }} = {{- if .IsNullable }}&{{- end }}{{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}{} } {{- /*Only attach the body struct in params if user passed some flag filling some body fields.*/}} - {{- /* add "&" to $flagModelVar when it is not nullable because the retrieve method always expects a pointer */}} - err, added := retrieveModel{{ pascalize (dropPackage .GoType) }}Flags(0, {{if not .IsNullable}}&{{end}}{{ $flagModelVar }}, "{{ camelize (dropPackage .GoType) }}", cmd) + {{- /* add "&" to flagModelVar .Name when it is not nullable because the retrieve method always expects a pointer */}} + err, added := retrieveModel{{ pascalize (dropPackage .GoType) }}Flags(0, {{if not .IsNullable}}&{{end}}{{ flagModelVar .Name }}, "{{ camelize (dropPackage .GoType) }}", cmd) if err != nil{ return err, false } if added { - m.{{.ID}} = {{ $flagModelVar }} + m.{{.ID}} = {{ flagModelVar .Name }} } - if dryRun && debug { - {{/* dry run we don't get trasnport debug strings, so print it here*/}} - {{- $bodyDebugVar := printf "%vDebugBytes" (camelize $flagValueVar) }} + + if dryRun && debug { {{/* dry run we don't get trasnport debug strings, so print it here*/}} + {{- $bodyDebugVar := printf "%vDebugBytes" (flagValueVar .Name) }} {{ $bodyDebugVar }}, err := json.Marshal(m.{{.ID}}) if err != nil{ return err, false } logDebugf("{{.ID }} dry-run payload: %v", string({{ $bodyDebugVar }})) } - retAdded = retAdded || added - {{/*body debug string will be printed in transport layer*/}} + + retAdded = retAdded || added {{/*body debug string will be printed in transport layer*/}} {{- end }} + return nil, retAdded } {{- end }} {{/*Params*/}} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/registerflag.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/registerflag.gotmpl index 637811155..fb0dba04b 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/registerflag.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/registerflag.gotmpl @@ -8,34 +8,32 @@ {{- if .Enum }} {{- $fullDescription = printf "Enum: %v. %v" (json .Enum) $fullDescription}} {{- end }} - {{ camelize .Name }}Description := `{{ $fullDescription }}` + {{ flagDescriptionVar .Name }} := `{{ $fullDescription }}` {{ end }} {{ define "flagnamevar" }} - {{- $flagNameVar := printf "%vFlagName" (camelize .Name) }} - var {{ $flagNameVar }} string + var {{ flagNameVar .Name }} string if cmdPrefix == "" { - {{ $flagNameVar }} = "{{ .Name }}" + {{ flagNameVar .Name }} = "{{ .Name }}" }else{ - {{ $flagNameVar }} = fmt.Sprintf("%v.{{ .Name }}", cmdPrefix) + {{ flagNameVar .Name }} = fmt.Sprintf("%v.{{ .Name }}", cmdPrefix) } {{ end }} {{ define "flagdefaultvar" }} - {{ $defaultVar := printf "%vFlagDefault" (camelize .Name) }} - var {{ $defaultVar}} {{ .GoType }} {{ if .Default }}= {{ printf "%#v" .Default }}{{ end }} + var {{ flagDefaultVar .Name }} {{ .GoType }} {{ if .Default }}= {{ printf "%#v" .Default }}{{ end }} {{ end }} {{/* Not used. 
CLI does not mark flag as required, and required will be checked by validation in future */}} {{/* {{ define "requiredregistrator" }} - if err := cmd.MarkPersistentFlagRequired({{ camelize .Name }}FlagName); err != nil{ + if err := cmd.MarkPersistentFlagRequired({{ flagNameVar .Name }}); err != nil{ return err } {{ end }} */}} {{ define "enumcompletion" }} {{/*only used for primitive types. completion type is always string.*/}} {{ if .Enum }} -if err := cmd.RegisterFlagCompletionFunc({{ camelize .Name }}FlagName, +if err := cmd.RegisterFlagCompletionFunc({{ flagNameVar .Name }}, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { var res []string if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil { @@ -54,12 +52,12 @@ if err := cmd.RegisterFlagCompletionFunc({{ camelize .Name }}FlagName, {{ template "flagdescriptionvar" . }} {{ template "flagnamevar" . }} {{ template "flagdefaultvar" . }} - _ = cmd.PersistentFlags().{{ pascalize .GoType }}({{ camelize .Name }}FlagName, {{ camelize .Name }}FlagDefault, {{ (camelize .Name) }}Description) + _ = cmd.PersistentFlags().{{ pascalize .GoType }}({{ flagNameVar .Name }}, {{ flagDefaultVar .Name }}, {{ flagDescriptionVar .Name }}) {{ template "enumcompletion" . }} - {{- else if or (eq .GoType "strfmt.DateTime") (eq .GoType "strfmt.UUID") (eq .GoType "strfmt.ObjectId") }} {{/* read as string */}} + {{- else if or (eq .GoType "strfmt.DateTime") (eq .GoType "strfmt.UUID") (eq .GoType "strfmt.ObjectId") (eq .GoType "strfmt.ULID") }} {{/* read as string */}} {{ template "flagdescriptionvar" . }} {{ template "flagnamevar" . }} - _ = cmd.PersistentFlags().String({{ camelize .Name }}FlagName, "", {{ (camelize .Name) }}Description) + _ = cmd.PersistentFlags().String({{ flagNameVar .Name }}, "", {{ flagDescriptionVar .Name }}) {{ template "enumcompletion" . }} {{- else }} // warning: primitive {{.Name}} {{.GoType }} is not supported by go-swagger cli yet @@ -71,12 +69,12 @@ if err := cmd.RegisterFlagCompletionFunc({{ camelize .Name }}FlagName, {{ template "flagdescriptionvar" . }} {{ template "flagnamevar" . }} {{ template "flagdefaultvar" . }} - _ = cmd.PersistentFlags().{{ pascalize .GoType }}Slice({{ camelize .Name }}FlagName, {{ camelize .Name }}FlagDefault, {{ (camelize .Name) }}Description) + _ = cmd.PersistentFlags().{{ pascalize .GoType }}Slice({{ flagNameVar .Name }}, {{ flagDefaultVar .Name }}, {{ flagDescriptionVar .Name }}) {{ template "enumcompletion" . }} - {{- else if or (eq .GoType "[]strfmt.DateTime") (eq .GoType "[]strfmt.UUID") (eq .GoType "[]strfmt.ObjectId") }} {{/* read as string */}} + {{- else if or (eq .GoType "[]strfmt.DateTime") (eq .GoType "[]strfmt.UUID") (eq .GoType "[]strfmt.ObjectId") (eq .GoType "[]strfmt.ULID") }} {{/* read as string */}} {{ template "flagdescriptionvar" . }} {{ template "flagnamevar" . }} - _ = cmd.PersistentFlags().StringSlice({{ camelize .Name }}FlagName, []string{}, {{ (camelize .Name) }}Description) + _ = cmd.PersistentFlags().StringSlice({{ flagNameVar .Name }}, []string{}, {{ flagDescriptionVar .Name }}) {{- else }} // warning: array {{.Name}} {{.GoType }} is not supported by go-swagger cli yet {{- end }} @@ -86,7 +84,7 @@ if err := cmd.RegisterFlagCompletionFunc({{ camelize .Name }}FlagName, {{/* each body parameter gets a string flag to input json raw string */}} {{ define "modelparamstringregistrator" }} {{ template "flagnamevar" . 
}} - _ = cmd.PersistentFlags().String({{ camelize .Name }}FlagName, "", "Optional json string for [{{ .Name }}]. {{ .Description }}") + _ = cmd.PersistentFlags().String({{ flagNameVar .Name }}, "", `Optional json string for [{{ .Name }}]. {{ escapeBackticks .Description }}`) {{ end }} {{ define "modelparamregistrator" }} {{/* register a param that has a schema */}} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/retrieveflag.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/retrieveflag.gotmpl index a1ff1e5de..d702340ab 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/retrieveflag.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/retrieveflag.gotmpl @@ -1,59 +1,55 @@ {{/*util functions to retrieve flags*/}} {{ define "primitiveretriever" }} - {{- $flagValueVar := printf "%vFlagValue" (camelize .Name) }} - {{- $flagNameVar := printf "%vFlagName" (camelize .Name )}} {{- if or (eq .GoType "int64") (eq .GoType "int32") (eq .GoType "string") (eq .GoType "float64") (eq .GoType "float32") (eq .GoType "bool") }} {{ template "flagnamevar" . }} - {{ $flagValueVar }}, err := cmd.Flags().Get{{pascalize .GoType}}({{ $flagNameVar }}) + {{ flagValueVar .Name }}, err := cmd.Flags().Get{{pascalize .GoType}}({{ flagNameVar .Name }}) if err != nil{ return err, false } {{- /* reciever by convention is m for CLI */}} - m.{{ pascalize .Name }} = {{- if .IsNullable }}&{{- end }}{{ $flagValueVar }} + m.{{ pascalize .Name }} = {{- if .IsNullable }}&{{- end }}{{ flagValueVar .Name }} {{- else if or (eq .GoType "strfmt.DateTime") (eq .GoType "strfmt.ObjectId") (eq .GoType "strfmt.UUID" ) }} {{/*Get flag value as string, then parse it*/}} {{/*Many of the strfmt types can be added here*/}} {{ template "flagnamevar" . }} - {{ $flagValueVar }}Str, err := cmd.Flags().GetString({{ $flagNameVar }}) + {{ flagValueVar .Name }}Str, err := cmd.Flags().GetString({{ flagNameVar .Name }}) if err != nil{ return err, false } - var {{ $flagValueVar }} {{ .GoType }} - if err := {{ $flagValueVar }}.UnmarshalText([]byte({{ $flagValueVar }}Str)); err != nil{ + var {{ flagValueVar .Name }} {{ .GoType }} + if err := {{ flagValueVar .Name }}.UnmarshalText([]byte({{ flagValueVar .Name }}Str)); err != nil{ return err, false } - m.{{ pascalize .Name }} = {{- if .IsNullable }}&{{- end }}{{ $flagValueVar }} + m.{{ pascalize .Name }} = {{- if .IsNullable }}&{{- end }}{{ flagValueVar .Name }} {{- else }} // warning: primitive {{.Name}} {{.GoType }} is not supported by go-swagger cli yet {{- end }} {{ end }} {{ define "arrayretriever" }} - {{- $flagValueVar := printf "%vFlagValues" (camelize .Name) }} - {{- $flagNameVar := printf "%vFlagName" (camelize .Name )}} {{- if or (eq .GoType "[]int64") (eq .GoType "[]int32") (eq .GoType "[]string") (eq .GoType "[]float64") (eq .GoType "[]float32") (eq .GoType "[]bool") }} {{ template "flagnamevar" . 
}} - {{ $flagValueVar }}, err := cmd.Flags().Get{{pascalize .GoType}}Slice({{ $flagNameVar }}) + {{ flagValueVar .Name }}, err := cmd.Flags().Get{{pascalize .GoType}}Slice({{ flagNameVar .Name }}) if err != nil{ return err, false } - {{- /* reciever by convention is m for CLI */}} - m.{{ pascalize .Name }} = {{ $flagValueVar }} + {{- /* receiver by convention is m for CLI */}} + m.{{ pascalize .Name }} = {{ flagValueVar .Name }} {{- else if or (eq .GoType "[]strfmt.DateTime") (eq .GoType "[]strfmt.ObjectId") (eq .GoType "[]strfmt.UUID") }} {{/*Get flag value as string, then parse it*/}} {{ template "flagnamevar" . }} - {{ $flagValueVar }}Str, err := cmd.Flags().GetStringSlice({{ $flagNameVar }}) + {{ flagValueVar .Name }}Str, err := cmd.Flags().GetStringSlice({{ flagNameVar .Name }}) if err != nil{ return err, false } - {{ $flagValueVar }} := make({{ .GoType }}, len({{ $flagValueVar }}Str)) - for i, v := range {{ $flagValueVar }}Str { - if err := {{ $flagValueVar }}[i].UnmarshalText([]byte(v)); err != nil{ + {{ flagValueVar .Name }} := make({{ .GoType }}, len({{ flagValueVar .Name }}Str)) + for i, v := range {{ flagValueVar .Name }}Str { + if err := {{ flagValueVar .Name }}[i].UnmarshalText([]byte(v)); err != nil{ return err, false } } - m.{{ pascalize .Name }} = {{- if .IsNullable }}&{{- end }}{{ $flagValueVar }} + m.{{ pascalize .Name }} = {{- if .IsNullable }}&{{- end }}{{ flagValueVar .Name }} {{- else }} // warning: array {{.Name}} {{.GoType }} is not supported by go-swagger cli yet {{- end }} -{{ end }} \ No newline at end of file +{{ end }} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/schema.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/schema.gotmpl index 2dc42aebc..cd5b13ca5 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/schema.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/schema.gotmpl @@ -14,12 +14,12 @@ {{- if .IsPrimitive }} {{ template "primitiveregistrator" . }} {{- else if .IsArray }} - // warning: {{.Name}} {{ .GoType }} array type is not supported by go-swagger cli yet + // warning: {{.Name}} {{ .GoType }} array type is not supported by go-swagger cli yet {{- else if .IsMap }} // warning: {{.Name}} {{ .GoType }} map type is not supported by go-swagger cli yet {{- else if .IsComplexObject }} {{/* struct case */}} {{ template "flagnamevar" . }} - if err := registerModel{{pascalize (dropPackage .GoType) }}Flags(depth + 1, {{ camelize .Name }}FlagName, cmd); err != nil{ + if err := registerModel{{pascalize (dropPackage .GoType) }}Flags(depth + 1, {{ flagNameVar .Name }}, cmd); err != nil{ return err } {{- else }} @@ -28,10 +28,8 @@ {{ end }} {{ define "propertyretriever" }} - {{- $flagNameVar := printf "%vFlagName" (camelize .Name) }} - {{- $flagValueVar := printf "%vFlagValue" (camelize .Name) }} - {{ $flagNameVar }} := fmt.Sprintf("%v.{{ .Name }}", cmdPrefix) - if cmd.Flags().Changed({{ $flagNameVar }}) { + {{ flagNameVar .Name }} := fmt.Sprintf("%v.{{ .Name }}", cmdPrefix) + if cmd.Flags().Changed({{ flagNameVar .Name }}) { {{- if .IsPrimitive }} {{ template "primitiveretriever" . 
}} retAdded = true @@ -46,18 +44,18 @@ {{- end }} } {{- if and .IsComplexObject (not .IsArray) (not .IsMap) (not .IsStream) }} - {{ $flagValueVar }} := m.{{pascalize .Name}} - if swag.IsZero({{ $flagValueVar }}){ - {{ $flagValueVar }} = {{if .IsNullable }}&{{end}}{{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}{} + {{ flagValueVar .Name }} := m.{{pascalize .Name}} + if swag.IsZero({{ flagValueVar .Name }}){ + {{ flagValueVar .Name }} = {{if .IsNullable }}&{{end}}{{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}{} } {{/* always lift the payload to pointer and pass to model retrieve function. If .GoType has pkg str, use it, else use .Pkg+.GoType */}} - err, {{camelize .Name }}Added := retrieveModel{{pascalize (dropPackage .GoType) }}Flags(depth + 1, {{if not .IsNullable }}&{{end}}{{ $flagValueVar }}, {{ $flagNameVar }}, cmd) + err, {{pascalize .Name }}Added := retrieveModel{{pascalize (dropPackage .GoType) }}Flags(depth + 1, {{if not .IsNullable }}&{{end}}{{ flagValueVar .Name }}, {{ flagNameVar .Name }}, cmd) if err != nil{ return err, false } - retAdded = retAdded || {{camelize .Name }}Added - if {{camelize .Name }}Added { - m.{{pascalize .Name}} = {{ $flagValueVar }} + retAdded = retAdded || {{pascalize .Name }}Added + if {{pascalize .Name }}Added { + m.{{pascalize .Name}} = {{ flagValueVar .Name }} } {{- end }} {{ end }} @@ -85,14 +83,14 @@ func registerModel{{pascalize .Name}}Flags(depth int, cmdPrefix string, cmd *cob // register anonymous fields for {{.Name}} {{ $anonName := .Name }} {{ range .Properties }} - if err := register{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize .Name }}(depth, cmdPrefix, cmd); err != nil{ + if err := register{{ pascalize $modelName }}PropAnon{{pascalize $anonName }}{{ pascalize .Name }}(depth, cmdPrefix, cmd); err != nil{ return err } {{ end }} {{ end }} {{ end }} {{ range .Properties }} - if err := register{{ pascalize $modelName }}{{ pascalize .Name }}(depth, cmdPrefix, cmd); err != nil{ + if err := register{{ pascalize $modelName }}Prop{{ pascalize .Name }}(depth, cmdPrefix, cmd); err != nil{ return err } {{ end }} @@ -104,7 +102,7 @@ func registerModel{{pascalize .Name}}Flags(depth int, cmdPrefix string, cmd *cob // inline definition name {{ .Name }}, type {{.GoType}} {{ $anonName := .Name }} {{ range .Properties }} -func register{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize .Name }}(depth int, cmdPrefix string, cmd *cobra.Command) error { +func register{{ pascalize $modelName }}PropAnon{{pascalize $anonName }}{{ pascalize .Name }}(depth int, cmdPrefix string, cmd *cobra.Command) error { if depth > maxDepth { return nil } @@ -117,7 +115,7 @@ func register{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize {{/*register functions for each fields in this model */}} {{ range .Properties }} -func register{{ pascalize $modelName }}{{ pascalize .Name }}(depth int, cmdPrefix string, cmd *cobra.Command) error{ +func register{{ pascalize $modelName }}Prop{{ pascalize .Name }}(depth int, cmdPrefix string, cmd *cobra.Command) error{ if depth > maxDepth { return nil } @@ -133,11 +131,11 @@ func retrieveModel{{pascalize $modelName }}Flags(depth int, m *{{if containsPkgS {{- if not .IsAnonymous }}{{/* named type composition */}} {{ if or .IsPrimitive .IsComplexObject }} // retrieve model {{.GoType}} - err, {{camelize .Name }}Added := retrieveModel{{ pascalize (dropPackage .GoType) }}Flags(depth, &m.{{pascalize (dropPackage .GoType) }}, cmdPrefix, 
cmd) + err, {{pascalize .Name }}Added := retrieveModel{{ pascalize (dropPackage .GoType) }}Flags(depth, &m.{{pascalize (dropPackage .GoType) }}, cmdPrefix, cmd) if err != nil{ return err, false } - retAdded = retAdded || {{camelize .Name }}Added + retAdded = retAdded || {{pascalize .Name }}Added {{ else }} {{/*inline anonymous case*/}} {{ end }} @@ -145,20 +143,20 @@ func retrieveModel{{pascalize $modelName }}Flags(depth int, m *{{if containsPkgS // retrieve allOf {{.Name}} fields {{ $anonName := .Name }} {{ range .Properties }} - err, {{camelize .Name}}Added := retrieve{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize .Name }}Flags(depth, m, cmdPrefix, cmd) + err, {{pascalize .Name}}Added := retrieve{{ pascalize $modelName }}PropAnon{{pascalize $anonName }}{{ pascalize .Name }}Flags(depth, m, cmdPrefix, cmd) if err != nil{ return err, false } - retAdded = retAdded || {{ camelize .Name }}Added + retAdded = retAdded || {{ pascalize .Name }}Added {{ end }} {{- end }} {{ end }} {{ range .Properties }} - err, {{ camelize .Name }}Added := retrieve{{pascalize $modelName }}{{pascalize .Name }}Flags(depth, m, cmdPrefix, cmd) + err, {{ pascalize .Name }}Added := retrieve{{pascalize $modelName }}Prop{{pascalize .Name }}Flags(depth, m, cmdPrefix, cmd) if err != nil{ return err, false } - retAdded = retAdded || {{ camelize .Name }}Added + retAdded = retAdded || {{ pascalize .Name }}Added {{ end }} return nil, retAdded } @@ -168,7 +166,7 @@ func retrieveModel{{pascalize $modelName }}Flags(depth int, m *{{if containsPkgS // define retrieve functions for fields for inline definition name {{ .Name }} {{ $anonName := .Name }} {{ range .Properties }} {{/*anonymous fields will be registered directly on parent model*/}} -func retrieve{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize .Name }}Flags(depth int, m *{{if containsPkgStr $modelType}}{{ $modelType }}{{else}}{{ $modelPkg }}.{{$modelType}}{{ end }},cmdPrefix string, cmd *cobra.Command) (error,bool) { +func retrieve{{ pascalize $modelName }}PropAnon{{pascalize $anonName }}{{ pascalize .Name }}Flags(depth int, m *{{if containsPkgStr $modelType}}{{ $modelType }}{{else}}{{ $modelPkg }}.{{$modelType}}{{ end }},cmdPrefix string, cmd *cobra.Command) (error,bool) { if depth > maxDepth { return nil, false } @@ -181,7 +179,7 @@ func retrieve{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize {{ end }} {{ range .Properties }} -func retrieve{{pascalize $modelName }}{{pascalize .Name }}Flags(depth int, m *{{if $modelPkg}}{{$modelPkg}}.{{ dropPackage $modelType }}{{else}}{{ $modelType }}{{end}}, cmdPrefix string, cmd *cobra.Command) (error, bool) { +func retrieve{{pascalize $modelName }}Prop{{pascalize .Name }}Flags(depth int, m *{{if $modelPkg}}{{$modelPkg}}.{{ dropPackage $modelType }}{{else}}{{ $modelType }}{{end}}, cmdPrefix string, cmd *cobra.Command) (error, bool) { if depth > maxDepth { return nil, false } @@ -190,4 +188,4 @@ func retrieve{{pascalize $modelName }}{{pascalize .Name }}Flags(depth int, m *{{ return nil, retAdded } {{ end }} {{/*properties*/}} -{{ end }} {{/*define*/}} \ No newline at end of file +{{ end }} {{/*define*/}} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/client/client.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/client.gotmpl index 3d01e9dcc..85a996372 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/client/client.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/client.gotmpl 
@@ -16,6 +16,7 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" + httptransport "github.com/go-openapi/runtime/client" "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" @@ -29,6 +30,31 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientServi return &Client{transport: transport, formats: formats} } +// New creates a new {{ humanize .Name }} API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new {{ humanize .Name }} API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client {{ if .Summary }}{{ .Summary }}{{ if .Description }} @@ -39,9 +65,58 @@ type Client struct { formats strfmt.Registry } -// ClientOption is the option for Client methods +// ClientOption may be used to customize the behavior of Client methods. type ClientOption func(*runtime.ClientOperation) +{{- with .ClientOptions }}{{/* use ad'hoc function mediaGoName rather than pascalize because of patterns with * */}} + +// This client is generated with a few options you might find useful for your swagger spec. +// +// Feel free to add you own set of options. + {{- if gt (len .ConsumesMediaTypes) 1 }} + +// WithContentType allows the client to force the Content-Type header +// to negotiate a specific Consumer from the server. +// +// You may use this option to set arbitrary extensions to your MIME media type. +func WithContentType(mime string) ClientOption { + return func(r *runtime.ClientOperation) { + r.ConsumesMediaTypes = []string{mime} + } +} + {{ range .ConsumesMediaTypes }} + {{- if not ( eq (mediaGoName .) "" ) }}{{/* guard: in case garbled input produces a (conflicting) empty name */}} + +// WithContentType{{ mediaGoName . }} sets the Content-Type header to {{ printf "%q" . }}. +func WithContentType{{ mediaGoName . }}(r *runtime.ClientOperation) { + r.ConsumesMediaTypes = []string{ {{ printf "%q" . }} } +} + {{- end }} + {{- end }} + {{- end }} + {{- if gt (len .ProducesMediaTypes) 1 }} + +// WithAccept allows the client to force the Accept header +// to negotiate a specific Producer from the server. +// +// You may use this option to set arbitrary extensions to your MIME media type. +func WithAccept(mime string) ClientOption { + return func(r *runtime.ClientOperation) { + r.ProducesMediaTypes = []string{mime} + } +} + {{ range .ProducesMediaTypes }} + {{- if not ( eq (mediaGoName .) 
"" ) }}{{/* guard: in case garbled input produces a (conflicting) empty name */}} + +// WithAccept{{ mediaGoName . }} sets the Accept header to {{ printf "%q" . }}. +func WithAccept{{ mediaGoName . }}(r *runtime.ClientOperation) { + r.ProducesMediaTypes = []string{ {{ printf "%q" . }} } +} + {{- end }} + {{- end }} + {{- end }} +{{- end }} + // ClientService is the interface for Client methods type ClientService interface { {{ range .Operations }} @@ -124,4 +199,4 @@ func (a *Client) {{ pascalize .Name }}(params *{{ pascalize .Name }}Params{{ if // SetTransport changes the transport on the client func (a *Client) SetTransport(transport runtime.ClientTransport) { a.transport = transport -} \ No newline at end of file +} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/client/response.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/response.gotmpl index d62238540..dce21aa2f 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/client/response.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/response.gotmpl @@ -119,11 +119,17 @@ func ({{ .ReceiverName }} *{{ pascalize .Name }}) Code() int { } func ({{ .ReceiverName }} *{{ pascalize .Name }}) Error() string { - return fmt.Sprintf("[{{ upper .Method }} {{ .Path }}][%d] {{ if .Name }}{{ .Name }} {{ else }}unknown error {{ end }}{{ if .Schema }} %+v{{ end }}", {{ if eq .Code -1 }}{{ .ReceiverName }}._statusCode{{ else }}{{ .Code }}{{ end }}{{ if .Schema }}, o.Payload{{ end }}) + {{- if .Schema }}{{ if (not .Schema.IsStream) }} + payload, _ := json.Marshal(o.Payload) + {{- end }}{{- end }} + return fmt.Sprintf("[{{ upper .Method }} {{ .Path }}][%d]{{ if .Name }} {{ .Name }}{{ else }} unknown error{{ end }}{{ if .Schema }}{{ if not .Schema.IsStream }} %s{{ end }}{{ end }}", {{ if eq .Code -1 }}{{ .ReceiverName }}._statusCode{{ else }}{{ .Code }}{{ end }}{{ if .Schema }}{{ if not .Schema.IsStream }}, payload{{ end }}{{ end }}) } func ({{ .ReceiverName }} *{{ pascalize .Name }}) String() string { - return fmt.Sprintf("[{{ upper .Method }} {{ .Path }}][%d] {{ if .Name }}{{ .Name }} {{ else }}unknown response {{ end }}{{ if .Schema }} %+v{{ end }}", {{ if eq .Code -1 }}{{ .ReceiverName }}._statusCode{{ else }}{{ .Code }}{{ end }}{{ if .Schema }}, o.Payload{{ end }}) + {{- if .Schema }}{{ if (not .Schema.IsStream) }} + payload, _ := json.Marshal(o.Payload) + {{- end }}{{- end }} + return fmt.Sprintf("[{{ upper .Method }} {{ .Path }}][%d]{{ if .Name }} {{ .Name }}{{ else }} unknown response{{ end }}{{ if .Schema }}{{ if not .Schema.IsStream }} %s{{ end }}{{ end }}", {{ if eq .Code -1 }}{{ .ReceiverName }}._statusCode{{ else }}{{ .Code }}{{ end }}{{ if .Schema }}{{ if not .Schema.IsStream }}, payload{{ end }}{{ end }}) } {{ if .Schema }} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/README.md b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/README.md index 1d36d66f5..9ef4fea80 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/README.md +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/README.md @@ -71,7 +71,7 @@ type PetAPI interface { PetUpdate(ctx context.Context, params pet.PetUpdateParams) middleware.Responder } -//go:generate mockery -name StoreAPI -inpkg +//go:generate mockery --name StoreAPI --inpackage // StoreAPI type StoreAPI interface { diff --git 
a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/configureapi.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/configureapi.gotmpl index eaee9701f..03f8d6745 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/configureapi.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/configureapi.gotmpl @@ -29,7 +29,7 @@ type contextKey string const AuthKey contextKey = "Auth" {{ range .OperationGroups -}} -//go:generate mockery -name {{ pascalize .Name}}API -inpkg +//go:generate mockery --name {{ pascalize .Name}}API --inpackage /* {{ pascalize .Name }}API {{ .Description }} */ type {{ pascalize .Name }}API interface { diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/docstring.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/docstring.gotmpl index 8e7108be1..a0a9d123e 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/docstring.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/docstring.gotmpl @@ -10,13 +10,13 @@ {{- else }} {{- humanize .Name }} {{- end }} - {{- if or .MinProperties .MinProperties }} + {{- if or .MinProperties .MaxProperties }} // {{- if .MinProperties }} -// Min Properties: {{ .MinProperties }} +// MinProperties: {{ .MinProperties }} {{- end }} {{- if .MaxProperties }} -// Max Properties: {{ .MaxProperties }} +// MaxProperties: {{ .MaxProperties }} {{- end }} {{- end }} {{- if .Example }} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/markdown/docs.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/markdown/docs.gotmpl index 8b7c6b3dd..79461d1d0 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/markdown/docs.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/markdown/docs.gotmpl @@ -2,18 +2,18 @@ {{- with .ExternalDocs }} {{- if .URL }} {{- if .Description }} -> [{{ trimSpace .Description }}]({{ .URL }}) +> [{{ mdBlock .Description }}]({{ .URL }}) {{- else }} > [Read more]({{ .URL }}) {{- end }} {{- else }} -> {{ trimSpace .Description }} +> {{ mdBlock .Description }} {{- end }} {{- end }} {{- end }} {{- define "docParam" }}{{/* renders a parameter with simple schema */}} -| {{ .Name }} | `{{ .Location }}` | {{ paramDocType . }} | `{{ .GoType }}` | {{ if .CollectionFormat }}`{{ docCollectionFormat .CollectionFormat .Child }}`{{ end }} | {{ if .Required }}✓{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }} | {{ trimSpace .Description }} | +| {{ .Name }} | `{{ .Location }}` | {{ paramDocType . }} | `{{ .GoType }}` | {{ if .CollectionFormat }}`{{ docCollectionFormat .CollectionFormat .Child }}`{{ end }} | {{ if .Required }}✓{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }} | {{ mdBlock .Description }} | {{- end }} {{- define "docModelSchema" }}{{/* renders a schema */}} @@ -46,7 +46,7 @@ {{- else if and .IsAliased .IsPrimitive (not .IsSuperAlias) -}} | Name | Type | Go type | Default | Description | Example | |------|------|---------| ------- |-------------|---------| -| {{ .Name }} | {{ schemaDocType . }}| {{ .AliasedType }} | {{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} | +| {{ .Name }} | {{ schemaDocType . 
}}| {{ .AliasedType }} | {{ if .Default }}`{{ json .Default }}`{{ end }}| {{ mdBlock .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} | {{ printf "\n" }} {{- else if or (and .IsAliased (not (.IsAdditionalProperties))) (and .IsComplexObject (not .Properties) (not .AllOf)) -}} [{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}}) @@ -71,7 +71,7 @@ any | Name | Type | Go type | Required | Default | Description | Example | |------|------|---------|:--------:| ------- |-------------|---------| {{- range .Properties }} -| {{ .Name }} | {{ template "docSchemaSimple" . }}| `{{ .GoType }}` | {{ if .Required }}✓{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} | +| {{ .Name }} | {{ template "docSchemaSimple" . }}| `{{ .GoType }}` | {{ if .Required }}✓{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }}| {{ mdBlock .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} | {{- end }} {{ printf "\n" }} {{- end }} @@ -86,7 +86,7 @@ any | Type | Go type | Default | Description | Example | |------|---------| ------- |-------------|---------| -| {{ template "docSchemaSimple" . }} | `{{ .GoType }}` |{{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} | +| {{ template "docSchemaSimple" . }} | `{{ .GoType }}` |{{ if .Default }}`{{ json .Default }}`{{ end }}| {{ mdBlock .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} | {{- else }} {{ template "docModelSchema" . }} @@ -104,7 +104,7 @@ any | Type | Go type | Default | Description | Example | |------|---------| ------- |-------------|---------| -| {{ template "docSchemaSimple" . }} | `{{ .GoType }}` |{{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} | +| {{ template "docSchemaSimple" . }} | `{{ .GoType }}` |{{ if .Default }}`{{ json .Default }}`{{ end }}| {{ mdBlock .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} | {{- else }} {{ template "docModelSchema" . }} @@ -161,7 +161,7 @@ any {{- end }} {{- define "docModelBodyParam" }}{{/* layout for body param schema */}} -| {{ .Name }} | `body` | {{ template "docSchemaSimple" .Schema }} | `{{ .Schema.GoType }}` | | {{ if .Required }}✓{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | +| {{ .Name }} | `body` | {{ template "docSchemaSimple" .Schema }} | `{{ .Schema.GoType }}` | | {{ if .Required }}✓{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }}| {{ mdBlock .Description }} | {{- end }} {{- define "docHeaders" }}{{/* renders response headers */}} @@ -169,7 +169,7 @@ any | Name | Type | Go type | Separator | Default | Description | |------|------|---------|-----------|---------|-------------| {{- range .Headers }} -| {{ .Name }} | {{ headerDocType . }} | `{{ .GoType }}` | {{ if .CollectionFormat }}`{{ docCollectionFormat .CollectionFormat .Child }}`{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }} | {{ trimSpace .Description }} | +| {{ .Name }} | {{ headerDocType . 
}} | `{{ .GoType }}` | {{ if .CollectionFormat }}`{{ docCollectionFormat .CollectionFormat .Child }}`{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }} | {{ mdBlock .Description }} | {{- end }} {{- end }} {{- end }} @@ -350,7 +350,7 @@ Name | Description {{- range .Operations }} {{- $opname := .Name }} -### {{ if .Summary }}{{ trimSpace .Summary }}{{ else }}{{ humanize .Name }}{{ end }} (*{{ .Name }}*) +### {{ if .Summary }}{{ mdBlock .Summary }}{{ else }}{{ humanize .Name }}{{ end }} (*{{ .Name }}*) ``` {{ upper .Method }} {{ joinPath .BasePath .Path }} @@ -424,16 +424,16 @@ Name | Description | Code | Status | Description | Has headers | Schema | |------|--------|-------------|:-----------:|--------| {{- range .Responses }} -| [{{.Code}}](#{{ dasherize $opname }}-{{ .Code }}) | {{ httpStatus .Code }} | {{ trimSpace .Description }} | {{ if .Headers }}✓{{ end }} | [schema](#{{ dasherize $opname }}-{{ .Code }}-schema) | +| [{{.Code}}](#{{ dasherize $opname }}-{{ .Code }}) | {{ httpStatus .Code }} | {{ mdBlock .Description }} | {{ if .Headers }}✓{{ end }} | [schema](#{{ dasherize $opname }}-{{ .Code }}-schema) | {{- end }} {{- with .DefaultResponse }} -| [default](#{{ dasherize $opname }}-default) | | {{ trimSpace .Description }} | {{ if .Headers }}✓{{ end }} | [schema](#{{ dasherize $opname }}-default-schema) | +| [default](#{{ dasherize $opname }}-default) | | {{ mdBlock .Description }} | {{ if .Headers }}✓{{ end }} | [schema](#{{ dasherize $opname }}-default-schema) | {{- end }} #### Responses {{ range .Responses }} -##### {{.Code}}{{ if .Description }} - {{ trimSpace .Description }}{{ end }} +##### {{.Code}}{{ if .Description }} - {{ mdBlock .Description }}{{ end }} Status: {{ httpStatus .Code }} ###### Schema @@ -462,7 +462,7 @@ Status: {{ httpStatus .Code }} {{- with .DefaultResponse }} ##### Default Response -{{ trimSpace .Description }} +{{ mdBlock .Description }} ###### Schema {{- if .Schema }} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schemavalidator.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemavalidator.gotmpl index 61684acd0..cee8a5dd6 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/schemavalidator.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemavalidator.gotmpl @@ -1,6 +1,6 @@ {{ define "primitivefieldcontextvalidator" }} {{ if .ReadOnly }} - if err := validate.ReadOnly(ctx, {{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil{ + if err := validate.ReadOnly(ctx, {{ path . }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil{ return err } {{ end }} @@ -8,25 +8,25 @@ {{ define "primitivefieldvalidator" }} {{ if .Required }} {{- if and (eq .GoType "string") (not .IsNullable) }} - if err := validate.RequiredString({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsAliased }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if .IsAliased }}){{ end }}); err != nil { + if err := validate.RequiredString({{ path . 
}}, {{ printf "%q" .Location }}, {{ if .IsAliased }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if .IsAliased }}){{ end }}); err != nil { {{- else }} - if err := validate.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { + if err := validate.Required({{ path . }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { {{- end }} return err } {{- end }} {{ if .MinLength }} - if err := validate.MinLength({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MinLength }}); err != nil { + if err := validate.MinLength({{ path . }}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MinLength }}); err != nil { return err } {{- end }} {{ if .MaxLength }} - if err := validate.MaxLength({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MaxLength }}); err != nil { + if err := validate.MaxLength({{ path . }}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MaxLength }}); err != nil { return err } {{ end }} {{ if .Pattern }} - if err := validate.Pattern({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ToString }}, `{{ escapeBackticks .Pattern }}`); err != nil { + if err := validate.Pattern({{ path . }}, {{ printf "%q" .Location }}, {{ .ToString }}, `{{ escapeBackticks .Pattern }}`); err != nil { return err } {{- end }} @@ -41,7 +41,7 @@ {{ end }} {{ if .Enum }} // value enum - if err := {{.ReceiverName }}.validate{{ pascalize .Name }}{{ .Suffix }}Enum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}); err != nil { + if err := {{.ReceiverName }}.validate{{ pascalize .Name }}{{ .Suffix }}Enum({{ path . }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}); err != nil { return err } {{- end }} @@ -52,7 +52,7 @@ {{ define "slicecontextvalidator" }} {{ if .ReadOnly }} - if err := validate.ReadOnly(ctx, {{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil{ + if err := validate.ReadOnly(ctx, {{ path . }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil{ return err } {{ end }} @@ -71,9 +71,9 @@ {{- end }} if err := {{.ValueExpression }}.ContextValidate(ctx, formats); err != nil { if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}) + return ve.ValidateName({{ path . }}) } else if ce, ok := err.(*errors.CompositeError); ok { - return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}) + return ce.ValidateName({{ path . }}) } return err } @@ -86,7 +86,7 @@ {{define "slicevalidator" }} {{ if .Required }} - if err := validate.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}); err != nil { + if err := validate.Required({{ path . 
}}, {{ printf "%q" .Location }}, {{ .ValueExpression }}); err != nil { return err } {{ end }} @@ -94,23 +94,23 @@ {{ .IndexVar }}{{ pascalize .Name }}Size := int64(len({{.ValueExpression }})) {{ end }} {{ if .MinItems }} - if err := validate.MinItems({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .IndexVar }}{{ pascalize .Name }}Size, {{.MinItems }}); err != nil { + if err := validate.MinItems({{ path . }}, {{ printf "%q" .Location }}, {{ .IndexVar }}{{ pascalize .Name }}Size, {{.MinItems }}); err != nil { return err } {{ end }} {{ if .MaxItems }} - if err := validate.MaxItems({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .IndexVar }}{{ pascalize .Name }}Size, {{.MaxItems }}); err != nil { + if err := validate.MaxItems({{ path . }}, {{ printf "%q" .Location }}, {{ .IndexVar }}{{ pascalize .Name }}Size, {{.MaxItems }}); err != nil { return err } {{ end }} {{ if .UniqueItems }} - if err := validate.UniqueItems({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{.ValueExpression }}); err != nil { + if err := validate.UniqueItems({{ path . }}, {{ printf "%q" .Location }}, {{.ValueExpression }}); err != nil { return err } {{ end }} {{ if .Enum }} // for slice - if err := {{.ReceiverName }}.validate{{ pascalize .Name }}Enum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{.ValueExpression }}); err != nil { + if err := {{.ReceiverName }}.validate{{ pascalize .Name }}Enum({{ path . }}, {{ printf "%q" .Location }}, {{.ValueExpression }}); err != nil { return err } {{ end }} @@ -138,9 +138,9 @@ {{- end }} if err := {{.ValueExpression }}.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}) + return ve.ValidateName({{ path . }}) } else if ce, ok := err.(*errors.CompositeError); ok { - return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}) + return ce.ValidateName({{ path . }}) } return err } @@ -154,10 +154,10 @@ {{- if and .Required }} {{- if or .IsNullable .IsInterface }} if {{ .ReceiverName }}.{{ pascalize .Name }} == nil { - return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, nil) + return errors.Required({{ path . }}, {{ printf "%q" .Location }}, nil) } {{- else }} - if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{ .ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { + if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ path . }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{ .ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { return err } {{- end }} @@ -213,7 +213,7 @@ {{ template "mapcontextvalidator" . }} {{- else if and .IsMap .IsInterface }} {{ if .Enum }} - if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}ValueEnum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil { + if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}ValueEnum({{ path . 
}}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil { return err } {{- end }} @@ -278,10 +278,10 @@ {{- if and .Required }} {{- if or .IsNullable .IsInterface }} if {{ .ReceiverName }}.{{ pascalize .Name }} == nil { - return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, nil) + return errors.Required({{ path . }}, {{ printf "%q" .Location }}, nil) } {{- else }} - if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{ .ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { + if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ path . }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{ .ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { return err } {{- end }} @@ -295,12 +295,12 @@ {{- if .IsInterface }} if {{ $validatedValues }}[{{ $keyVar }}] == nil { // not required {{- else }} - if swag.IsZero({{ $validatedValues }}[{{ $keyVar }}]) { // not required + if swag.IsZero({{ .ValueExpression }}) { // not required {{- end }} continue } {{- else if and (.Required) (not .IsArray) }}{{/* Required slice is processed below */}} - if err := validate.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil { + if err := validate.Required({{ path . }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil { return err } {{- end }} @@ -313,9 +313,9 @@ {{- end }} if err := val.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}) + return ve.ValidateName({{ path . }}) } else if ce, ok := err.(*errors.CompositeError); ok { - return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}) + return ce.ValidateName({{ path . }}) } return err } @@ -342,7 +342,7 @@ {{- end }} {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}} {{- if .Required }} - if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { + if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ path . }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { return err } {{- end }} @@ -352,15 +352,16 @@ {{- else if .IsArray }} {{ template "slicevalidator" . }} {{- else if and .IsMap (not .IsInterface) }} + {{ template "minmaxProperties" .}} {{ template "mapvalidator" . }} {{ if .Enum }} - if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}ValueEnum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil { + if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}ValueEnum({{ path . 
}}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil { return err } {{- end }} {{- else if and .IsMap .IsInterface }} {{ if .Enum }} - if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}ValueEnum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil { + if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}ValueEnum({{ path . }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil { return err } {{- end }} @@ -372,9 +373,9 @@ {{- end }} if err := val.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}) + return ve.ValidateName({{ path . }}) } else if ce, ok := err.(*errors.CompositeError); ok { - return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}) + return ce.ValidateName({{ path . }}) } return err } @@ -402,7 +403,7 @@ {{ end }} {{ if .Enum }} // from map - if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Enum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}); err != nil { + if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Enum({{ path . }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}); err != nil { return err } {{ end }} @@ -430,6 +431,11 @@ {{- end }} {{- end }} {{- end }} + {{- else if .Enum }} + // from map without additionalProperties + if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Enum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}); err != nil { + return err + } {{- end }} {{ end }} @@ -462,9 +468,9 @@ {{ end }} if err := {{.ValueExpression }}.ContextValidate(ctx, formats); err != nil { if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}) + return ve.ValidateName({{ path . }}) } else if ce, ok := err.(*errors.CompositeError); ok { - return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}) + return ce.ValidateName({{ path . }}) } return err } @@ -490,14 +496,14 @@ {{ define "minmaxProperties" }} {{- if and (or .IsMap (and .IsAdditionalProperties .HasAdditionalProperties)) (or .MinProperties .MaxProperties) }} {{- if and (not .IsAdditionalProperties) (not .IsInterface) (eq (len .Properties) 0) }}{{/* map only */}} - nprops := len({{ if and (not .IsAliased) .HasAdditionalProperties }}{{ .ReceiverName }}{{ else }}{{ .ValueExpression }}{{ end }}) + nprops := len({{ if and .IsMap (not .IsAliased) .HasAdditionalProperties (not .IsElem) (not .IsProperty) }}{{ .ReceiverName }}{{ else }}{{ .ValueExpression }}{{ end }}) {{- else }}{{/* object with properties */}} {{- if and .IsNullable .MinProperties }} {{- if gt0 .MinProperties }} // short circuits minProperties > 0 if {{ .ReceiverName }} == nil { - return errors.TooFewProperties({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .MinProperties }}) + return errors.TooFewProperties({{ path . }}, {{ printf "%q" .Location }}, {{ .MinProperties }}) } {{- end }} {{- end }} @@ -517,13 +523,13 @@ {{ if .MinProperties }} // minProperties: {{ .MinProperties }} if nprops < {{ .MinProperties }} { - return errors.TooFewProperties({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .MinProperties }}) + return errors.TooFewProperties({{ path . 
}}, {{ printf "%q" .Location }}, {{ .MinProperties }}) } {{- end }} {{ if .MaxProperties }} // maxProperties: {{ .MaxProperties }} if nprops > {{ .MaxProperties }} { - return errors.TooManyProperties({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .MaxProperties }}) + return errors.TooManyProperties({{ path . }}, {{ printf "%q" .Location }}, {{ .MaxProperties }}) } {{- end }} {{- end }} @@ -548,7 +554,7 @@ */}} {{- if not .IsAnonymous }} {{- if and .Required (or .IsNullable .IsBaseType .IsMap) }} - if err := validate.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{.ValueExpression }}); err != nil { + if err := validate.Required({{ path . }}, {{ printf "%q" .Location }}, {{.ValueExpression }}); err != nil { return err } {{- if and (not .Required) .IsBaseType }} @@ -563,9 +569,9 @@ {{- end }} if err := {{.ValueExpression }}.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}) + return ve.ValidateName({{ path . }}) } else if ce, ok := err.(*errors.CompositeError); ok { - return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}) + return ce.ValidateName({{ path . }}) } return err } @@ -602,7 +608,7 @@ // at https://github.com/go-swagger/go-swagger/issues {{- if .ReadOnly }} - if err := validate.ReadOnly{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { + if err := validate.ReadOnly{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ path . }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { return err } {{- end }} @@ -625,7 +631,7 @@ {{- if .IsPrimitive }} {{- if .IsAliased }} {{- if and .Required (not .IsAnonymous) }} - if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { + if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ path . }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { return err } {{- end }} @@ -635,7 +641,7 @@ {{- end }} {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}} {{- if .Required }} - if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { + if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ path . 
}}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { return err } {{- end }} @@ -651,10 +657,10 @@ {{- if and .IsAdditionalProperties .Required (not .IsAliased) }} {{- if or .IsNullable .IsInterface }} if {{ .ValueExpression }} == nil { - return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}) + return errors.Required({{ path . }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}) } {{- else }} - if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { + if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ path . }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { return err } {{- end }} @@ -663,10 +669,10 @@ {{- else if and .IsExternal .Required }} {{- if or .IsNullable .IsInterface }} if {{ .ValueExpression }} == nil { - return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}) + return errors.Required({{ path . }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}) } {{- else }} - if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { + if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ path . }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { return err } {{- end }} @@ -697,7 +703,7 @@ {{ template "primitivefieldvalidator" . }} {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}} {{- if .Required }} - if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { + if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ path . }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil { return err } {{- end }} @@ -1034,11 +1040,11 @@ func ({{.ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else i {{- if and $.IsTuple .IsMap .Required }} {{- if .IsInterface }} if {{ .ValueExpression }} == nil { - return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}) + return errors.Required({{ path . 
}}, {{ printf "%q" .Location }}, {{ .ValueExpression }}) } {{- else }} if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}( - {{- if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, + {{ path . }}, {{ printf "%q" .Location }}, {{- if and (eq .GoType "string") (not (or .IsAnonymous .IsNullable)) }}{{ .GoType }}({{ end }} {{- .ValueExpression }} {{- if and (eq .GoType "string") (not (or .IsAnonymous .IsNullable)) }}){{ end }}); err != nil { diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/server.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/server.gotmpl index c78d22051..ccbc520b0 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/server.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/server.gotmpl @@ -230,7 +230,7 @@ type Server struct { ListenLimit int{{ if .UseGoStructFlags }} `long:"listen-limit" description:"limit the number of outstanding requests"`{{ end }} KeepAlive time.Duration{{ if .UseGoStructFlags }} `long:"keep-alive" description:"sets the TCP keep-alive timeouts on accepted connections. It prunes dead TCP connections ( e.g. closing laptop mid-download)" default:"3m"`{{ end }} ReadTimeout time.Duration{{ if .UseGoStructFlags }} `long:"read-timeout" description:"maximum duration before timing out read of the request" default:"30s"`{{ end }} - WriteTimeout time.Duration{{ if .UseGoStructFlags }} `long:"write-timeout" description:"maximum duration before timing out write of the response" default:"60s"`{{ end }} + WriteTimeout time.Duration{{ if .UseGoStructFlags }} `long:"write-timeout" description:"maximum duration before timing out write of the response" default:"30s"`{{ end }} httpServerL net.Listener TLSHost string{{ if .UseGoStructFlags }} `long:"tls-host" description:"the IP to listen on for tls, when not specified it's the same as --host" env:"TLS_HOST"`{{ end }} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/customformat.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/customformat.gotmpl index 354075a90..a69e2d7a7 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/customformat.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/customformat.gotmpl @@ -1,3 +1,3 @@ -if err := validate.FormatOf({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ printf "%q" .SwaggerFormat }}, {{ .ToString }}, formats); err != nil { +if err := validate.FormatOf({{ path . 
}}, {{ printf "%q" .Location }}, {{ printf "%q" .SwaggerFormat }}, {{ .ToString }}, formats); err != nil { return err } diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/maximum.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/maximum.gotmpl index 993f7344f..fb8a2b3de 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/maximum.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/maximum.gotmpl @@ -1,21 +1,21 @@ {{- if or (hasPrefix .UnderlyingType "int") }} {{- if and (hasPrefix .UnderlyingType "int64") (not .IsAliased) }} -if err := validate.MaximumInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil { +if err := validate.MaximumInt({{ path . }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil { {{- else }} -if err := validate.MaximumInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, int64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil { +if err := validate.MaximumInt({{ path . }}, {{ printf "%q" .Location }}, int64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil { {{- end }} {{- else }} {{- if hasPrefix .UnderlyingType "uint" }} {{- if and (hasPrefix .UnderlyingType "uint64") (not .IsAliased) }} -if err := validate.MaximumUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil { +if err := validate.MaximumUint({{ path . }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil { {{- else }} -if err := validate.MaximumUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, uint64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil { +if err := validate.MaximumUint({{ path . }}, {{ printf "%q" .Location }}, uint64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil { {{- end }} {{- else }} {{- if and (eq .UnderlyingType "float64") (not .IsAliased) }} -if err := validate.Maximum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil { +if err := validate.Maximum({{ path . }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil { {{- else }} -if err := validate.Maximum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, float64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil { +if err := validate.Maximum({{ path . 
}}, {{ printf "%q" .Location }}, float64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil { {{- end }} {{- end }} {{- end }} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/minimum.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/minimum.gotmpl index 626c207cb..fda5ba4e0 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/minimum.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/minimum.gotmpl @@ -1,21 +1,21 @@ {{- if hasPrefix .UnderlyingType "int" }} {{- if and (hasPrefix .UnderlyingType "int64") (not .IsAliased) }} -if err := validate.MinimumInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil { +if err := validate.MinimumInt({{ path . }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil { {{- else }} -if err := validate.MinimumInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, int64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil { +if err := validate.MinimumInt({{ path . }}, {{ printf "%q" .Location }}, int64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil { {{- end }} {{- else }} {{- if hasPrefix .UnderlyingType "uint" }} {{- if and (hasPrefix .UnderlyingType "uint64") (not .IsAliased) }} -if err := validate.MinimumUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil { +if err := validate.MinimumUint({{ path . }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil { {{- else }} -if err := validate.MinimumUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, uint64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil { +if err := validate.MinimumUint({{ path . }}, {{ printf "%q" .Location }}, uint64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil { {{- end }} {{- else }} {{- if and (eq .UnderlyingType "float64") (not .IsAliased) }} -if err := validate.Minimum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil { +if err := validate.Minimum({{ path . }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil { {{- else }} -if err := validate.Minimum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, float64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil { +if err := validate.Minimum({{ path . 
}}, {{ printf "%q" .Location }}, float64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil { {{- end }} {{- end }} {{- end }} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/multipleOf.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/multipleOf.gotmpl index 28796852d..c48e75e39 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/multipleOf.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/multipleOf.gotmpl @@ -1,21 +1,21 @@ {{- if and (hasPrefix .UnderlyingType "int") (isInteger .MultipleOf) }}{{/* if the type is an integer, but the multiple factor is not, fall back to the float64 version of the validator */}} {{- if and (hasPrefix .UnderlyingType "int64") (not .IsAliased) }} -if err := validate.MultipleOfInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.MultipleOf }}); err != nil { +if err := validate.MultipleOfInt({{ path . }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.MultipleOf }}); err != nil { {{- else }} -if err := validate.MultipleOfInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, int64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.MultipleOf }}); err != nil { +if err := validate.MultipleOfInt({{ path . }}, {{ printf "%q" .Location }}, int64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.MultipleOf }}); err != nil { {{- end }} {{- else }} {{- if and (hasPrefix .UnderlyingType "uint") (isInteger .MultipleOf) }} {{- if and (hasPrefix .UnderlyingType "uint64") (not .IsAliased) }} -if err := validate.MultipleOfUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.MultipleOf }}); err != nil { +if err := validate.MultipleOfUint({{ path . }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.MultipleOf }}); err != nil { {{- else }} -if err := validate.MultipleOfUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, uint64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.MultipleOf }}); err != nil { +if err := validate.MultipleOfUint({{ path . }}, {{ printf "%q" .Location }}, uint64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.MultipleOf }}); err != nil { {{- end }} {{- else }} {{- if and (eq .UnderlyingType "float64") (not .IsAliased) }} -if err := validate.MultipleOf({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.MultipleOf }}); err != nil { +if err := validate.MultipleOf({{ path . }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.MultipleOf }}); err != nil { {{- else }} -if err := validate.MultipleOf({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, float64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.MultipleOf }}); err != nil { +if err := validate.MultipleOf({{ path . 
}}, {{ printf "%q" .Location }}, float64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.MultipleOf }}); err != nil { {{- end }} {{- end }} {{- end }} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/primitive.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/primitive.gotmpl index 35238d784..8dbf79643 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/primitive.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/primitive.gotmpl @@ -1,15 +1,15 @@ {{if .MinLength}} -if err := validate.MinLength({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MinLength}}); err != nil { +if err := validate.MinLength({{ path . }}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MinLength}}); err != nil { return err } {{end}} {{if .MaxLength}} -if err := validate.MaxLength({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MaxLength}}); err != nil { +if err := validate.MaxLength({{ path . }}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MaxLength}}); err != nil { return err } {{end}} {{if .Pattern}} -if err := validate.Pattern({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .ToString }}, `{{escapeBackticks .Pattern}}`); err != nil { +if err := validate.Pattern({{ path . }}, {{ printf "%q" .Location }}, {{ .ToString }}, `{{escapeBackticks .Pattern}}`); err != nil { return err } {{end}} @@ -23,7 +23,7 @@ if err := validate.Pattern({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf {{ template "validationMultipleOf" . }} {{end}} {{if .Enum}} -if err := validate.EnumCase({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) .IsNullable }}*{{ end }}{{.ValueExpression}}{{ if .IsCustomFormatter }}.String(){{ end }}, {{ printf "%#v" .Enum}}, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil { +if err := validate.EnumCase({{ path . 
}}, {{ printf "%q" .Location }}, {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) .IsNullable }}*{{ end }}{{.ValueExpression}}{{ if .IsCustomFormatter }}.String(){{ end }}, {{ printf "%#v" .Enum}}, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil { return err } {{end}} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/structfield.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/structfield.gotmpl index 8378c4615..26fd47d2f 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/structfield.gotmpl +++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/structfield.gotmpl @@ -40,14 +40,6 @@ // Min Items: {{ .MinItems }} {{- end }} -{{- if .MinProperties }} -// Min Properties: {{ .MinProperties }} -{{- end }} - -{{- if .MaxProperties }} -// Max Properties: {{ .MaxProperties }} -{{- end }} - {{- if .UniqueItems }} // Unique: true {{- end }} @@ -57,6 +49,6 @@ {{- end }} {{- if .Enum }} -// Enum: {{ printf "%v" .Enum }} +// Enum: {{ json .Enum }} {{- end }} {{- end}} diff --git a/vendor/github.com/go-swagger/go-swagger/generator/types.go b/vendor/github.com/go-swagger/go-swagger/generator/types.go index d2a6a4f5e..5cc00f24b 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/types.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/types.go @@ -345,7 +345,6 @@ func (t *typeResolver) inferAliasing(result *resolvedType, _ *spec.Schema, isAno } func (t *typeResolver) resolveFormat(schema *spec.Schema, isAnonymous bool, isRequired bool) (returns bool, result resolvedType, err error) { - if schema.Format != "" { // defaults to string result.SwaggerType = str @@ -401,7 +400,6 @@ func (t *typeResolver) resolveFormat(schema *spec.Schema, isAnonymous bool, isRe // // The interpretation of Required as a mean to make a type nullable is carried out elsewhere. func (t *typeResolver) isNullable(schema *spec.Schema) bool { - if nullable, ok := t.isNullableOverride(schema); ok { return nullable } @@ -1000,8 +998,8 @@ func warnSkipValidation(types interface{}) func(string, interface{}) { func guardValidations(tpe string, schema interface { Validations() spec.SchemaValidations SetValidations(spec.SchemaValidations) -}, types ...string) { - +}, types ...string, +) { v := schema.Validations() if len(types) == 0 { types = []string{tpe} @@ -1049,7 +1047,8 @@ func guardValidations(tpe string, schema interface { func guardFormatConflicts(format string, schema interface { Validations() spec.SchemaValidations SetValidations(spec.SchemaValidations) -}) { +}, +) { v := schema.Validations() msg := fmt.Sprintf("for format %q", format) diff --git a/vendor/github.com/go-swagger/go-swagger/scan/README.md b/vendor/github.com/go-swagger/go-swagger/scan/README.md deleted file mode 100644 index 1ae6f766f..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# scan - -Pre go1.11 version of the go source parser, without support for go modules. 
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/classifier.go b/vendor/github.com/go-swagger/go-swagger/scan/classifier.go deleted file mode 100644 index e674272d0..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/classifier.go +++ /dev/null @@ -1,166 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package scan - -import ( - "fmt" - "go/ast" - "log" - "regexp" - - "golang.org/x/tools/go/loader" -) - -type packageFilter struct { - Name string -} - -func (pf *packageFilter) Matches(path string) bool { - matched, err := regexp.MatchString(pf.Name, path) - if err != nil { - log.Fatal(err) - } - return matched -} - -type packageFilters []packageFilter - -func (pf packageFilters) HasFilters() bool { - return len(pf) > 0 -} - -func (pf packageFilters) Matches(path string) bool { - for _, mod := range pf { - if mod.Matches(path) { - return true - } - } - return false -} - -type classifiedProgram struct { - Meta []*ast.File - Models []*ast.File - Routes []*ast.File - Operations []*ast.File - Parameters []*ast.File - Responses []*ast.File -} - -// programClassifier classifies the files of a program into buckets -// for processing by a swagger spec generator. This buckets files in -// 3 groups: Meta, Models and Operations. -// -// # Each of these buckets is then processed with an appropriate parsing strategy -// -// When there are Include or Exclude filters provide they are used to limit the -// candidates prior to parsing. -// The include filters take precedence over the excludes. So when something appears -// in both filters it will be included. 
-type programClassifier struct { - Includes packageFilters - Excludes packageFilters -} - -func (pc *programClassifier) Classify(prog *loader.Program) (*classifiedProgram, error) { - var cp classifiedProgram - for pkg, pkgInfo := range prog.AllPackages { - if Debug { - log.Printf("analyzing: %s\n", pkg.Path()) - } - if pc.Includes.HasFilters() { - if !pc.Includes.Matches(pkg.Path()) { - continue - } - } else if pc.Excludes.HasFilters() { - if pc.Excludes.Matches(pkg.Path()) { - continue - } - } - - for _, file := range pkgInfo.Files { - var ro, op, mt, pm, rs, mm bool // only add a particular file once - for _, comments := range file.Comments { - var seenStruct string - for _, cline := range comments.List { - if cline != nil { - matches := rxSwaggerAnnotation.FindStringSubmatch(cline.Text) - if len(matches) > 1 { - switch matches[1] { - case "route": - if !ro { - cp.Routes = append(cp.Routes, file) - ro = true - } - case "operation": - if !op { - cp.Operations = append(cp.Operations, file) - op = true - } - case "model": - if !mm { - cp.Models = append(cp.Models, file) - mm = true - } - if seenStruct == "" || seenStruct == matches[1] { - seenStruct = matches[1] - } else { - return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text) - } - case "meta": - if !mt { - cp.Meta = append(cp.Meta, file) - mt = true - } - case "parameters": - if !pm { - cp.Parameters = append(cp.Parameters, file) - pm = true - } - if seenStruct == "" || seenStruct == matches[1] { - seenStruct = matches[1] - } else { - return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text) - } - case "response": - if !rs { - cp.Responses = append(cp.Responses, file) - rs = true - } - if seenStruct == "" || seenStruct == matches[1] { - seenStruct = matches[1] - } else { - return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text) - } - case "strfmt", "name", "discriminated", "file", "enum", "default", "alias", "type": - // TODO: perhaps collect these and pass along to avoid lookups later on - case "allOf": - case "ignore": - default: - return nil, fmt.Errorf("classifier: unknown swagger annotation %q", matches[1]) - } - } - - } - } - } - } - } - - return &cp, nil -} diff --git a/vendor/github.com/go-swagger/go-swagger/scan/doc.go b/vendor/github.com/go-swagger/go-swagger/scan/doc.go deleted file mode 100644 index 2bc415a8f..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/doc.go +++ /dev/null @@ -1,89 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* -Package scan provides a scanner for go files that produces a swagger spec document. - -This package is intended for pre-go1.11 versions, and does not support go modules. 
- -You give it a main file and it will parse all the files that are required by that main -package to produce a swagger specification. - -To use you can add a go:generate comment to your main file for example: - - //go:generate swagger generate spec - -The following annotations exist: - -swagger:meta - -The swagger:meta annotation flags a file as source for metadata about the API. -This is typically a doc.go file with your package documentation. - -You can specify a Consumes and Produces key which has a new content type on each line -Schemes is a tag that is required and allows for a comma separated string composed of: -http, https, ws or wss - -Host and BasePath can be specified but those values will be defaults, -they should get substituted when serving the swagger spec. - -Default parameters and responses are not supported at this stage, for those you can edit the template json. - -swagger:strfmt [name] - -A swagger:strfmt annotation names a type as a string formatter. The name is mandatory and that is -what will be used as format name for this particular string format. -String formats should only be used for very well known formats. - -swagger:model [?model name] - -A swagger:model annotation optionally gets a model name as extra data on the line. -when this appears anywhere in a comment for a struct, then that struct becomes a schema -in the definitions object of swagger. - -The struct gets analyzed and all the collected models are added to the tree. -The refs are tracked separately so that they can be renamed later on. - -When this annotation is found to be on an interface instead of a struct, the properties are provided -through exported nullary methods. - -A property of an interface model can have a Discriminator: true annotation to mark that field as -the field that will contain the discriminator value. - -swagger:route [method] [path pattern] [operation id] [?tag1 tag2 tag3] - -A swagger:route annotation links a path to a method. -This operation gets a unique id, which is used in various places as method name. -One such usage is in method names for client generation for example. - -Because there are many routers available, this tool does not try to parse the paths -you provided to your routing library of choice. So you have to specify your path pattern -yourself in valid swagger syntax. - -swagger:params [operationid1 operationid2] - -Links a struct to one or more operations. The params in the resulting swagger spec can be composed of several structs. -There are no guarantees given on how property name overlaps are resolved when several structs apply to the same operation. -This tag works very similarly to the swagger:model tag except that it produces valid parameter objects instead of schema -objects. - -swagger:response [?response name] - -Reads a struct decorated with swagger:response and uses that information to fill up the headers and the schema for a response. -A swagger:route can specify a response name for a status code and then the matching response will be used for that operation in the swagger definition. 
-*/ -package scan diff --git a/vendor/github.com/go-swagger/go-swagger/scan/enum.go b/vendor/github.com/go-swagger/go-swagger/scan/enum.go deleted file mode 100644 index d1ecc9c87..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/enum.go +++ /dev/null @@ -1,84 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -package scan - -import ( - "go/ast" - "strconv" - "strings" - "unicode" -) - -func upperSnakeCase(s string) string { - in := []rune(s) - isLower := func(idx int) bool { - return idx >= 0 && idx < len(in) && unicode.IsLower(in[idx]) - } - - out := make([]rune, 0, len(in)+len(in)/2) - - for i, r := range in { - if unicode.IsUpper(r) { - r = unicode.ToLower(r) - if i > 0 && in[i-1] != '_' && (isLower(i-1) || isLower(i+1)) { - out = append(out, '_') - } - } - out = append(out, r) - } - - return strings.ToUpper(string(out)) -} - -func getEnumBasicLitValue(basicLit *ast.BasicLit) interface{} { - switch basicLit.Kind.String() { - case "INT": - if result, err := strconv.ParseInt(basicLit.Value, 10, 64); err == nil { - return result - } - case "FLOAT": - if result, err := strconv.ParseFloat(basicLit.Value, 64); err == nil { - return result - } - default: - return strings.Trim(basicLit.Value, "\"") - } - return nil -} - -func getEnumValues(file *ast.File, typeName string) (list []interface{}) { - for _, decl := range file.Decls { - genDecl, ok := decl.(*ast.GenDecl) - - if !ok { - continue - } - - if genDecl.Tok.String() == "const" { - for _, spec := range genDecl.Specs { - if valueSpec, ok := spec.(*ast.ValueSpec); ok { - switch valueSpec.Type.(type) { - case *ast.Ident: - if valueSpec.Type.(*ast.Ident).Name == typeName { - if basicLit, ok := valueSpec.Values[0].(*ast.BasicLit); ok { - list = append(list, getEnumBasicLitValue(basicLit)) - } - } - default: - var name = valueSpec.Names[0].Name - if strings.HasPrefix(name, upperSnakeCase(typeName)) { - var values = strings.SplitN(name, "__", 2) - if len(values) == 2 { - list = append(list, values[1]) - } - } - } - - } - - } - } - } - return -} diff --git a/vendor/github.com/go-swagger/go-swagger/scan/meta.go b/vendor/github.com/go-swagger/go-swagger/scan/meta.go deleted file mode 100644 index f5b5ed5dd..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/meta.go +++ /dev/null @@ -1,246 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package scan - -import ( - "encoding/json" - "fmt" - "net/mail" - "regexp" - "strings" - - "github.com/go-openapi/spec" -) - -func metaTOSSetter(meta *spec.Info) func([]string) { - return func(lines []string) { - meta.TermsOfService = joinDropLast(lines) - } -} - -func metaConsumesSetter(meta *spec.Swagger) func([]string) { - return func(consumes []string) { meta.Consumes = consumes } -} - -func metaProducesSetter(meta *spec.Swagger) func([]string) { - return func(produces []string) { meta.Produces = produces } -} - -func metaSchemeSetter(meta *spec.Swagger) func([]string) { - return func(schemes []string) { meta.Schemes = schemes } -} - -func metaSecuritySetter(meta *spec.Swagger) func([]map[string][]string) { - return func(secDefs []map[string][]string) { meta.Security = secDefs } -} - -func metaSecurityDefinitionsSetter(meta *spec.Swagger) func(json.RawMessage) error { - return func(jsonValue json.RawMessage) error { - var jsonData spec.SecurityDefinitions - err := json.Unmarshal(jsonValue, &jsonData) - if err != nil { - return err - } - meta.SecurityDefinitions = jsonData - return nil - } -} - -func metaVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error { - return func(jsonValue json.RawMessage) error { - var jsonData spec.Extensions - err := json.Unmarshal(jsonValue, &jsonData) - if err != nil { - return err - } - for k := range jsonData { - if !rxAllowedExtensions.MatchString(k) { - return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k) - } - } - meta.Extensions = jsonData - return nil - } -} - -func infoVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error { - return func(jsonValue json.RawMessage) error { - var jsonData spec.Extensions - err := json.Unmarshal(jsonValue, &jsonData) - if err != nil { - return err - } - for k := range jsonData { - if !rxAllowedExtensions.MatchString(k) { - return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k) - } - } - meta.Info.Extensions = jsonData - return nil - } -} - -func newMetaParser(swspec *spec.Swagger) *sectionedParser { - sp := new(sectionedParser) - if swspec.Info == nil { - swspec.Info = new(spec.Info) - } - info := swspec.Info - sp.setTitle = func(lines []string) { - tosave := joinDropLast(lines) - if len(tosave) > 0 { - tosave = rxStripTitleComments.ReplaceAllString(tosave, "") - } - info.Title = tosave - } - sp.setDescription = func(lines []string) { info.Description = joinDropLast(lines) } - sp.taggers = []tagParser{ - newMultiLineTagParser("TOS", newMultilineDropEmptyParser(rxTOS, metaTOSSetter(info)), false), - newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, metaConsumesSetter(swspec)), false), - newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, metaProducesSetter(swspec)), false), - newSingleLineTagParser("Schemes", newSetSchemes(metaSchemeSetter(swspec))), - newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, metaSecuritySetter(swspec)), false), - newMultiLineTagParser("SecurityDefinitions", newYamlParser(rxSecurity, metaSecurityDefinitionsSetter(swspec)), true), - newSingleLineTagParser("Version", &setMetaSingle{swspec, rxVersion, setInfoVersion}), - newSingleLineTagParser("Host", &setMetaSingle{swspec, rxHost, setSwaggerHost}), - newSingleLineTagParser("BasePath", &setMetaSingle{swspec, rxBasePath, setSwaggerBasePath}), - newSingleLineTagParser("Contact", &setMetaSingle{swspec, rxContact, setInfoContact}), - newSingleLineTagParser("License", &setMetaSingle{swspec, 
rxLicense, setInfoLicense}), - newMultiLineTagParser("YAMLInfoExtensionsBlock", newYamlParser(rxInfoExtensions, infoVendorExtensibleSetter(swspec)), true), - newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, metaVendorExtensibleSetter(swspec)), true), - } - return sp -} - -type setMetaSingle struct { - spec *spec.Swagger - rx *regexp.Regexp - set func(spec *spec.Swagger, lines []string) error -} - -func (s *setMetaSingle) Matches(line string) bool { - return s.rx.MatchString(line) -} - -func (s *setMetaSingle) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := s.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - return s.set(s.spec, []string{matches[1]}) - } - return nil -} - -func setSwaggerHost(swspec *spec.Swagger, lines []string) error { - lns := lines - if len(lns) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - lns = []string{"localhost"} - } - swspec.Host = lns[0] - return nil -} - -func setSwaggerBasePath(swspec *spec.Swagger, lines []string) error { - var ln string - if len(lines) > 0 { - ln = lines[0] - } - swspec.BasePath = ln - return nil -} - -func setInfoVersion(swspec *spec.Swagger, lines []string) error { - if len(lines) == 0 { - return nil - } - info := safeInfo(swspec) - info.Version = strings.TrimSpace(lines[0]) - return nil -} - -func setInfoContact(swspec *spec.Swagger, lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - contact, err := parseContactInfo(lines[0]) - if err != nil { - return err - } - info := safeInfo(swspec) - info.Contact = contact - return nil -} - -func parseContactInfo(line string) (*spec.ContactInfo, error) { - nameEmail, url := splitURL(line) - var name, email string - if len(nameEmail) > 0 { - addr, err := mail.ParseAddress(nameEmail) - if err != nil { - return nil, err - } - name, email = addr.Name, addr.Address - } - return &spec.ContactInfo{ - URL: url, - Name: name, - Email: email, - }, nil -} - -func setInfoLicense(swspec *spec.Swagger, lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - info := safeInfo(swspec) - line := lines[0] - name, url := splitURL(line) - info.License = &spec.License{ - Name: name, - URL: url, - } - return nil -} - -func safeInfo(swspec *spec.Swagger) *spec.Info { - if swspec.Info == nil { - swspec.Info = new(spec.Info) - } - return swspec.Info -} - -// httpFTPScheme matches http://, https://, ws://, wss:// -var httpFTPScheme = regexp.MustCompile("(?:(?:ht|f)tp|ws)s?://") - -func splitURL(line string) (notURL, url string) { - str := strings.TrimSpace(line) - parts := httpFTPScheme.FindStringIndex(str) - if len(parts) == 0 { - if len(str) > 0 { - notURL = str - } - return - } - if len(parts) > 0 { - notURL = strings.TrimSpace(str[:parts[0]]) - url = strings.TrimSpace(str[parts[0]:]) - } - return -} diff --git a/vendor/github.com/go-swagger/go-swagger/scan/operations.go b/vendor/github.com/go-swagger/go-swagger/scan/operations.go deleted file mode 100644 index 31e2ea5a9..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/operations.go +++ /dev/null @@ -1,85 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package scan - -import ( - "fmt" - "go/ast" - - "github.com/go-openapi/spec" - - "golang.org/x/tools/go/loader" -) - -func newOperationsParser(prog *loader.Program) *operationsParser { - return &operationsParser{ - program: prog, - } -} - -type operationsParser struct { - program *loader.Program - definitions map[string]spec.Schema - operations map[string]*spec.Operation - responses map[string]spec.Response -} - -func (op *operationsParser) Parse(gofile *ast.File, target interface{}, includeTags map[string]bool, excludeTags map[string]bool) error { - tgt := target.(*spec.Paths) - for _, comsec := range gofile.Comments { - content := parsePathAnnotation(rxOperation, comsec.List) - - if content.Method == "" { - continue // it's not, next! - } - - if !shouldAcceptTag(content.Tags, includeTags, excludeTags) { - if Debug { - fmt.Printf("operation %s %s is ignored due to tag rules\n", content.Method, content.Path) - } - continue - } - - pthObj := tgt.Paths[content.Path] - - op := setPathOperation( - content.Method, content.ID, - &pthObj, op.operations[content.ID]) - - op.Tags = content.Tags - - sp := new(yamlSpecScanner) - sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) } - sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) } - - if err := sp.Parse(content.Remaining); err != nil { - return fmt.Errorf("operation (%s): %v", op.ID, err) - } - if err := sp.UnmarshalSpec(op.UnmarshalJSON); err != nil { - return fmt.Errorf("operation (%s): %v", op.ID, err) - } - - if tgt.Paths == nil { - tgt.Paths = make(map[string]spec.PathItem) - } - - tgt.Paths[content.Path] = pthObj - } - - return nil -} diff --git a/vendor/github.com/go-swagger/go-swagger/scan/parameters.go b/vendor/github.com/go-swagger/go-swagger/scan/parameters.go deleted file mode 100644 index 58d96ebe3..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/parameters.go +++ /dev/null @@ -1,515 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package scan - -import ( - "fmt" - "go/ast" - "strings" - - "github.com/go-openapi/spec" - "golang.org/x/tools/go/loader" -) - -type operationValidationBuilder interface { - validationBuilder - SetCollectionFormat(string) -} - -type paramTypable struct { - param *spec.Parameter -} - -func (pt paramTypable) Level() int { return 0 } - -func (pt paramTypable) Typed(tpe, format string) { - pt.param.Typed(tpe, format) -} - -func (pt paramTypable) WithEnum(values ...interface{}) { - pt.param.WithEnum(values...) 
-} - -func (pt paramTypable) SetRef(ref spec.Ref) { - pt.param.Ref = ref -} - -func (pt paramTypable) Items() swaggerTypable { - bdt, schema := bodyTypable(pt.param.In, pt.param.Schema) - if bdt != nil { - pt.param.Schema = schema - return bdt - } - - if pt.param.Items == nil { - pt.param.Items = new(spec.Items) - } - pt.param.Type = "array" - return itemsTypable{pt.param.Items, 1} -} - -func (pt paramTypable) Schema() *spec.Schema { - if pt.param.In != "body" { - return nil - } - if pt.param.Schema == nil { - pt.param.Schema = new(spec.Schema) - } - return pt.param.Schema -} - -type itemsTypable struct { - items *spec.Items - level int -} - -func (pt itemsTypable) Level() int { return pt.level } - -func (pt itemsTypable) Typed(tpe, format string) { - pt.items.Typed(tpe, format) -} - -func (pt itemsTypable) SetRef(ref spec.Ref) { - pt.items.Ref = ref -} - -func (pt itemsTypable) WithEnum(values ...interface{}) { - pt.items.WithEnum(values...) -} - -func (pt itemsTypable) Schema() *spec.Schema { - return nil -} - -func (pt itemsTypable) Items() swaggerTypable { - if pt.items.Items == nil { - pt.items.Items = new(spec.Items) - } - pt.items.Type = "array" - return itemsTypable{pt.items.Items, pt.level + 1} -} - -type paramValidations struct { - current *spec.Parameter -} - -func (sv paramValidations) SetMaximum(val float64, exclusive bool) { - sv.current.Maximum = &val - sv.current.ExclusiveMaximum = exclusive -} -func (sv paramValidations) SetMinimum(val float64, exclusive bool) { - sv.current.Minimum = &val - sv.current.ExclusiveMinimum = exclusive -} -func (sv paramValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val } -func (sv paramValidations) SetMinItems(val int64) { sv.current.MinItems = &val } -func (sv paramValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val } -func (sv paramValidations) SetMinLength(val int64) { sv.current.MinLength = &val } -func (sv paramValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val } -func (sv paramValidations) SetPattern(val string) { sv.current.Pattern = val } -func (sv paramValidations) SetUnique(val bool) { sv.current.UniqueItems = val } -func (sv paramValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val } -func (sv paramValidations) SetEnum(val string) { - sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format}) -} -func (sv paramValidations) SetDefault(val interface{}) { sv.current.Default = val } -func (sv paramValidations) SetExample(val interface{}) { sv.current.Example = val } - -type itemsValidations struct { - current *spec.Items -} - -func (sv itemsValidations) SetMaximum(val float64, exclusive bool) { - sv.current.Maximum = &val - sv.current.ExclusiveMaximum = exclusive -} -func (sv itemsValidations) SetMinimum(val float64, exclusive bool) { - sv.current.Minimum = &val - sv.current.ExclusiveMinimum = exclusive -} -func (sv itemsValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val } -func (sv itemsValidations) SetMinItems(val int64) { sv.current.MinItems = &val } -func (sv itemsValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val } -func (sv itemsValidations) SetMinLength(val int64) { sv.current.MinLength = &val } -func (sv itemsValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val } -func (sv itemsValidations) SetPattern(val string) { sv.current.Pattern = val } -func (sv itemsValidations) SetUnique(val bool) { sv.current.UniqueItems = val } -func (sv itemsValidations) 
SetCollectionFormat(val string) { sv.current.CollectionFormat = val } -func (sv itemsValidations) SetEnum(val string) { - sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format}) -} -func (sv itemsValidations) SetDefault(val interface{}) { sv.current.Default = val } -func (sv itemsValidations) SetExample(val interface{}) { sv.current.Example = val } - -type paramDecl struct { - File *ast.File - Decl *ast.GenDecl - TypeSpec *ast.TypeSpec - OperationIDs []string -} - -func (sd *paramDecl) inferOperationIDs() (opids []string) { - if len(sd.OperationIDs) > 0 { - opids = sd.OperationIDs - return - } - - if sd.Decl.Doc != nil { - for _, cmt := range sd.Decl.Doc.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - matches := rxParametersOverride.FindStringSubmatch(ln) - if len(matches) > 1 && len(matches[1]) > 0 { - for _, pt := range strings.Split(matches[1], " ") { - tr := strings.TrimSpace(pt) - if len(tr) > 0 { - opids = append(opids, tr) - } - } - } - } - } - } - sd.OperationIDs = append(sd.OperationIDs, opids...) - return -} - -func newParameterParser(prog *loader.Program) *paramStructParser { - scp := new(paramStructParser) - scp.program = prog - scp.scp = newSchemaParser(prog) - return scp -} - -type paramStructParser struct { - program *loader.Program - postDecls []schemaDecl - scp *schemaParser -} - -// Parse will traverse a file and look for parameters. -func (pp *paramStructParser) Parse(gofile *ast.File, target interface{}) error { - tgt := target.(map[string]*spec.Operation) - for _, decl := range gofile.Decls { - switch x1 := decl.(type) { - // Check for parameters at the package level. - case *ast.GenDecl: - for _, spc := range x1.Specs { - switch x2 := spc.(type) { - case *ast.TypeSpec: - sd := paramDecl{gofile, x1, x2, nil} - sd.inferOperationIDs() - if err := pp.parseDecl(tgt, sd); err != nil { - return err - } - } - } - // Check for parameters inside functions. 
- case *ast.FuncDecl: - for _, b := range x1.Body.List { - switch x2 := b.(type) { - case *ast.DeclStmt: - switch x3 := x2.Decl.(type) { - case *ast.GenDecl: - for _, spc := range x3.Specs { - switch x4 := spc.(type) { - case *ast.TypeSpec: - sd := paramDecl{gofile, x3, x4, nil} - sd.inferOperationIDs() - if err := pp.parseDecl(tgt, sd); err != nil { - return err - } - } - } - } - } - } - } - } - return nil -} - -func (pp *paramStructParser) parseDecl(operations map[string]*spec.Operation, decl paramDecl) error { - // check if there is a swagger:parameters tag that is followed by one or more words, - // these words are the ids of the operations this parameter struct applies to - // once type name is found convert it to a schema, by looking up the schema in the - // parameters dictionary that got passed into this parse method - for _, opid := range decl.inferOperationIDs() { - operation, ok := operations[opid] - if !ok { - operation = new(spec.Operation) - operations[opid] = operation - operation.ID = opid - } - - // analyze struct body for fields etc - // each exported struct field: - // * gets a type mapped to a go primitive - // * perhaps gets a format - // * has to document the validations that apply for the type and the field - // * when the struct field points to a model it becomes a ref: #/definitions/ModelName - // * comments that aren't tags is used as the description - if tpe, ok := decl.TypeSpec.Type.(*ast.StructType); ok { - if err := pp.parseStructType(decl.File, operation, tpe, make(map[string]spec.Parameter)); err != nil { - return err - } - } - - //operations[opid] = operation - } - return nil -} - -func (pp *paramStructParser) parseEmbeddedStruct(gofile *ast.File, operation *spec.Operation, expr ast.Expr, seenPreviously map[string]spec.Parameter) error { - switch tpe := expr.(type) { - case *ast.Ident: - // do lookup of type - // take primitives into account, they should result in an error for swagger - pkg, err := pp.scp.packageForFile(gofile, tpe) - if err != nil { - return fmt.Errorf("embedded struct: %v", err) - } - file, _, ts, err := findSourceFile(pkg, tpe.Name) - if err != nil { - return fmt.Errorf("embedded struct: %v", err) - } - if st, ok := ts.Type.(*ast.StructType); ok { - return pp.parseStructType(file, operation, st, seenPreviously) - } - case *ast.SelectorExpr: - // look up package, file and then type - pkg, err := pp.scp.packageForSelector(gofile, tpe.X) - if err != nil { - return fmt.Errorf("embedded struct: %v", err) - } - file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name) - if err != nil { - return fmt.Errorf("embedded struct: %v", err) - } - if st, ok := ts.Type.(*ast.StructType); ok { - return pp.parseStructType(file, operation, st, seenPreviously) - } - case *ast.StarExpr: - return pp.parseEmbeddedStruct(gofile, operation, tpe.X, seenPreviously) - } - fmt.Printf("3%#v\n", expr) - return fmt.Errorf("unable to resolve embedded struct for: %v", expr) -} - -func (pp *paramStructParser) parseStructType(gofile *ast.File, operation *spec.Operation, tpe *ast.StructType, seenPreviously map[string]spec.Parameter) error { - if tpe.Fields != nil { - pt := seenPreviously - - for _, fld := range tpe.Fields.List { - if len(fld.Names) == 0 { - // when the embedded struct is annotated with swagger:allOf it will be used as allOf property - // otherwise the fields will just be included as normal properties - if err := pp.parseEmbeddedStruct(gofile, operation, fld.Type, pt); err != nil { - return err - } - } - } - - // a slice used to keep track of the sequence of the 
map keys, as maps does not keep to any specific sequence (since Go-1.4) - sequence := []string{} - - for _, fld := range tpe.Fields.List { - if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() { - gnm := fld.Names[0].Name - nm, ignore, _, err := parseJSONTag(fld) - if err != nil { - return err - } - if ignore { - continue - } - - in := "query" - // scan for param location first, this changes some behavior down the line - if fld.Doc != nil { - for _, cmt := range fld.Doc.List { - for _, line := range strings.Split(cmt.Text, "\n") { - matches := rxIn.FindStringSubmatch(line) - if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 { - in = strings.TrimSpace(matches[1]) - } - } - } - } - - ps := pt[nm] - ps.In = in - var pty swaggerTypable = paramTypable{&ps} - if in == "body" { - pty = schemaTypable{pty.Schema(), 0} - } - if in == "formData" && fld.Doc != nil && fileParam(fld.Doc) { - pty.Typed("file", "") - } else { - if err := pp.scp.parseNamedType(gofile, fld.Type, pty); err != nil { - return err - } - } - - if strfmtName, ok := strfmtName(fld.Doc); ok { - ps.Typed("string", strfmtName) - ps.Ref = spec.Ref{} - } - - sp := new(sectionedParser) - sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) } - if ps.Ref.String() == "" { - sp.taggers = []tagParser{ - newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}), - newSingleLineTagParser("maximum", &setMaximum{paramValidations{&ps}, rxf(rxMaximumFmt, "")}), - newSingleLineTagParser("minimum", &setMinimum{paramValidations{&ps}, rxf(rxMinimumFmt, "")}), - newSingleLineTagParser("multipleOf", &setMultipleOf{paramValidations{&ps}, rxf(rxMultipleOfFmt, "")}), - newSingleLineTagParser("minLength", &setMinLength{paramValidations{&ps}, rxf(rxMinLengthFmt, "")}), - newSingleLineTagParser("maxLength", &setMaxLength{paramValidations{&ps}, rxf(rxMaxLengthFmt, "")}), - newSingleLineTagParser("pattern", &setPattern{paramValidations{&ps}, rxf(rxPatternFmt, "")}), - newSingleLineTagParser("collectionFormat", &setCollectionFormat{paramValidations{&ps}, rxf(rxCollectionFormatFmt, "")}), - newSingleLineTagParser("minItems", &setMinItems{paramValidations{&ps}, rxf(rxMinItemsFmt, "")}), - newSingleLineTagParser("maxItems", &setMaxItems{paramValidations{&ps}, rxf(rxMaxItemsFmt, "")}), - newSingleLineTagParser("unique", &setUnique{paramValidations{&ps}, rxf(rxUniqueFmt, "")}), - newSingleLineTagParser("enum", &setEnum{paramValidations{&ps}, rxf(rxEnumFmt, "")}), - newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxDefaultFmt, "")}), - newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxExampleFmt, "")}), - newSingleLineTagParser("required", &setRequiredParam{&ps}), - } - - itemsTaggers := func(items *spec.Items, level int) []tagParser { - // the expression is 1-index based not 0-index - itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1) - - return []tagParser{ - newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}), - 
newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}), - } - } - - var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) - parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) { - if items == nil { - return []tagParser{}, nil - } - switch iftpe := expr.(type) { - case *ast.ArrayType: - eleTaggers := itemsTaggers(items, level) - sp.taggers = append(eleTaggers, sp.taggers...) - otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1) - if err != nil { - return nil, err - } - return otherTaggers, nil - case *ast.SelectorExpr: - otherTaggers, err := parseArrayTypes(iftpe.Sel, items.Items, level+1) - if err != nil { - return nil, err - } - return otherTaggers, nil - case *ast.Ident: - taggers := []tagParser{} - if iftpe.Obj == nil { - taggers = itemsTaggers(items, level) - } - otherTaggers, err := parseArrayTypes(expr, items.Items, level+1) - if err != nil { - return nil, err - } - return append(taggers, otherTaggers...), nil - case *ast.StarExpr: - otherTaggers, err := parseArrayTypes(iftpe.X, items, level) - if err != nil { - return nil, err - } - return otherTaggers, nil - default: - return nil, fmt.Errorf("unknown field type ele for %q", nm) - } - } - - // check if this is a primitive, if so parse the validations from the - // doc comments of the slice declaration. - if ftped, ok := fld.Type.(*ast.ArrayType); ok { - taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0) - if err != nil { - return err - } - sp.taggers = append(taggers, sp.taggers...) - } - - } else { - - sp.taggers = []tagParser{ - newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}), - newSingleLineTagParser("required", &matchOnlyParam{&ps, rxRequired}), - } - } - if err := sp.Parse(fld.Doc); err != nil { - return err - } - if ps.In == "path" { - ps.Required = true - } - - if ps.Name == "" { - ps.Name = nm - } - - if nm != gnm { - addExtension(&ps.VendorExtensible, "x-go-name", gnm) - } - pt[nm] = ps - sequence = append(sequence, nm) - } - } - - for _, k := range sequence { - p := pt[k] - for i, v := range operation.Parameters { - if v.Name == k { - operation.Parameters = append(operation.Parameters[:i], operation.Parameters[i+1:]...) 
- break - } - } - operation.Parameters = append(operation.Parameters, p) - } - } - - return nil -} - -func isAliasParam(prop swaggerTypable) bool { - var isParam bool - if param, ok := prop.(paramTypable); ok { - isParam = param.param.In == "query" || - param.param.In == "path" || - param.param.In == "formData" - } - return isParam -} diff --git a/vendor/github.com/go-swagger/go-swagger/scan/path.go b/vendor/github.com/go-swagger/go-swagger/scan/path.go deleted file mode 100644 index 7302d41c3..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/path.go +++ /dev/null @@ -1,151 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package scan - -import ( - "go/ast" - "regexp" - "strings" - - "github.com/go-openapi/spec" -) - -type parsedPathContent struct { - Method, Path, ID string - Tags []string - Remaining *ast.CommentGroup -} - -func parsePathAnnotation(annotation *regexp.Regexp, lines []*ast.Comment) (cnt parsedPathContent) { - var justMatched bool - - for _, cmt := range lines { - for _, line := range strings.Split(cmt.Text, "\n") { - matches := annotation.FindStringSubmatch(line) - if len(matches) > 3 { - cnt.Method, cnt.Path, cnt.ID = matches[1], matches[2], matches[len(matches)-1] - cnt.Tags = rxSpace.Split(matches[3], -1) - if len(matches[3]) == 0 { - cnt.Tags = nil - } - justMatched = true - } else if cnt.Method != "" { - if cnt.Remaining == nil { - cnt.Remaining = new(ast.CommentGroup) - } - if !justMatched || strings.TrimSpace(rxStripComments.ReplaceAllString(line, "")) != "" { - cc := new(ast.Comment) - cc.Slash = cmt.Slash - cc.Text = line - cnt.Remaining.List = append(cnt.Remaining.List, cc) - justMatched = false - } - } - } - } - - return -} - -func setPathOperation(method, id string, pthObj *spec.PathItem, op *spec.Operation) *spec.Operation { - if op == nil { - op = new(spec.Operation) - op.ID = id - } - - switch strings.ToUpper(method) { - case "GET": - if pthObj.Get != nil { - if id == pthObj.Get.ID { - op = pthObj.Get - } else { - pthObj.Get = op - } - } else { - pthObj.Get = op - } - - case "POST": - if pthObj.Post != nil { - if id == pthObj.Post.ID { - op = pthObj.Post - } else { - pthObj.Post = op - } - } else { - pthObj.Post = op - } - - case "PUT": - if pthObj.Put != nil { - if id == pthObj.Put.ID { - op = pthObj.Put - } else { - pthObj.Put = op - } - } else { - pthObj.Put = op - } - - case "PATCH": - if pthObj.Patch != nil { - if id == pthObj.Patch.ID { - op = pthObj.Patch - } else { - pthObj.Patch = op - } - } else { - pthObj.Patch = op - } - - case "HEAD": - if pthObj.Head != nil { - if id == pthObj.Head.ID { - op = pthObj.Head - } else { - pthObj.Head = op - } - } else { - pthObj.Head = op - } - - case "DELETE": - if pthObj.Delete != nil { - if id == pthObj.Delete.ID { - op = pthObj.Delete - } else { - pthObj.Delete = op - } - } else { - pthObj.Delete = op - } - - case "OPTIONS": - if pthObj.Options != nil { - if id == pthObj.Options.ID { - op = 
pthObj.Options - } else { - pthObj.Options = op - } - } else { - pthObj.Options = op - } - } - - return op -} diff --git a/vendor/github.com/go-swagger/go-swagger/scan/responses.go b/vendor/github.com/go-swagger/go-swagger/scan/responses.go deleted file mode 100644 index 327b8a488..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/responses.go +++ /dev/null @@ -1,453 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package scan - -import ( - "fmt" - "go/ast" - "strings" - - "golang.org/x/tools/go/loader" - - "github.com/go-openapi/spec" -) - -type responseTypable struct { - in string - header *spec.Header - response *spec.Response -} - -func (ht responseTypable) Level() int { return 0 } - -func (ht responseTypable) Typed(tpe, format string) { - ht.header.Typed(tpe, format) -} - -func (ht responseTypable) WithEnum(values ...interface{}) { - ht.header.WithEnum(values) -} - -func bodyTypable(in string, schema *spec.Schema) (swaggerTypable, *spec.Schema) { - if in == "body" { - // get the schema for items on the schema property - if schema == nil { - schema = new(spec.Schema) - } - if schema.Items == nil { - schema.Items = new(spec.SchemaOrArray) - } - if schema.Items.Schema == nil { - schema.Items.Schema = new(spec.Schema) - } - schema.Typed("array", "") - return schemaTypable{schema.Items.Schema, 0}, schema - } - return nil, nil -} - -func (ht responseTypable) Items() swaggerTypable { - bdt, schema := bodyTypable(ht.in, ht.response.Schema) - if bdt != nil { - ht.response.Schema = schema - return bdt - } - - if ht.header.Items == nil { - ht.header.Items = new(spec.Items) - } - ht.header.Type = "array" - return itemsTypable{ht.header.Items, 1} -} - -func (ht responseTypable) SetRef(ref spec.Ref) { - // having trouble seeing the usefulness of this one here - ht.Schema().Ref = ref -} - -func (ht responseTypable) Schema() *spec.Schema { - if ht.response.Schema == nil { - ht.response.Schema = new(spec.Schema) - } - return ht.response.Schema -} - -func (ht responseTypable) SetSchema(schema *spec.Schema) { - ht.response.Schema = schema -} - -func (ht responseTypable) CollectionOf(items *spec.Items, format string) { - ht.header.CollectionOf(items, format) -} - -type headerValidations struct { - current *spec.Header -} - -func (sv headerValidations) SetMaximum(val float64, exclusive bool) { - sv.current.Maximum = &val - sv.current.ExclusiveMaximum = exclusive -} -func (sv headerValidations) SetMinimum(val float64, exclusive bool) { - sv.current.Minimum = &val - sv.current.ExclusiveMinimum = exclusive -} -func (sv headerValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val } -func (sv headerValidations) SetMinItems(val int64) { sv.current.MinItems = &val } -func (sv headerValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val } -func (sv headerValidations) SetMinLength(val int64) { sv.current.MinLength = &val } -func (sv headerValidations) SetMaxLength(val int64) { 
sv.current.MaxLength = &val } -func (sv headerValidations) SetPattern(val string) { sv.current.Pattern = val } -func (sv headerValidations) SetUnique(val bool) { sv.current.UniqueItems = val } -func (sv headerValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val } -func (sv headerValidations) SetEnum(val string) { - sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format}) -} -func (sv headerValidations) SetDefault(val interface{}) { sv.current.Default = val } -func (sv headerValidations) SetExample(val interface{}) { sv.current.Example = val } - -func newResponseDecl(file *ast.File, decl *ast.GenDecl, ts *ast.TypeSpec) responseDecl { - var rd responseDecl - rd.File = file - rd.Decl = decl - rd.TypeSpec = ts - rd.inferNames() - return rd -} - -type responseDecl struct { - File *ast.File - Decl *ast.GenDecl - TypeSpec *ast.TypeSpec - GoName string - Name string - annotated bool -} - -func (sd *responseDecl) hasAnnotation() bool { - sd.inferNames() - return sd.annotated -} - -func (sd *responseDecl) inferNames() (goName string, name string) { - if sd.GoName != "" { - goName, name = sd.GoName, sd.Name - return - } - goName = sd.TypeSpec.Name.Name - name = goName - if sd.Decl.Doc != nil { - DECLS: - for _, cmt := range sd.Decl.Doc.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - matches := rxResponseOverride.FindStringSubmatch(ln) - if len(matches) > 0 { - sd.annotated = true - } - if len(matches) > 1 && len(matches[1]) > 0 { - name = matches[1] - break DECLS - } - } - } - } - sd.GoName = goName - sd.Name = name - return -} - -func newResponseParser(prog *loader.Program) *responseParser { - return &responseParser{prog, nil, newSchemaParser(prog)} -} - -type responseParser struct { - program *loader.Program - postDecls []schemaDecl - scp *schemaParser -} - -func (rp *responseParser) Parse(gofile *ast.File, target interface{}) error { - tgt := target.(map[string]spec.Response) - for _, decl := range gofile.Decls { - switch x1 := decl.(type) { - // Check for parameters at the package level. - case *ast.GenDecl: - for _, spc := range x1.Specs { - switch x2 := spc.(type) { - case *ast.TypeSpec: - sd := newResponseDecl(gofile, x1, x2) - if sd.hasAnnotation() { - if err := rp.parseDecl(tgt, sd); err != nil { - return err - } - } - } - } - // Check for parameters inside functions. 
- case *ast.FuncDecl: - for _, b := range x1.Body.List { - switch x2 := b.(type) { - case *ast.DeclStmt: - switch x3 := x2.Decl.(type) { - case *ast.GenDecl: - for _, spc := range x3.Specs { - switch x4 := spc.(type) { - case *ast.TypeSpec: - sd := newResponseDecl(gofile, x3, x4) - if sd.hasAnnotation() { - if err := rp.parseDecl(tgt, sd); err != nil { - return err - } - } - } - } - } - } - } - } - } - return nil -} - -func (rp *responseParser) parseDecl(responses map[string]spec.Response, decl responseDecl) error { - // check if there is a swagger:parameters tag that is followed by one or more words, - // these words are the ids of the operations this parameter struct applies to - // once type name is found convert it to a schema, by looking up the schema in the - // parameters dictionary that got passed into this parse method - response := responses[decl.Name] - resPtr := &response - - // analyze doc comment for the model - sp := new(sectionedParser) - sp.setDescription = func(lines []string) { resPtr.Description = joinDropLast(lines) } - if err := sp.Parse(decl.Decl.Doc); err != nil { - return err - } - - // analyze struct body for fields etc - // each exported struct field: - // * gets a type mapped to a go primitive - // * perhaps gets a format - // * has to document the validations that apply for the type and the field - // * when the struct field points to a model it becomes a ref: #/definitions/ModelName - // * comments that aren't tags is used as the description - if tpe, ok := decl.TypeSpec.Type.(*ast.StructType); ok { - if err := rp.parseStructType(decl.File, resPtr, tpe, make(map[string]struct{})); err != nil { - return err - } - } - - responses[decl.Name] = response - return nil -} - -func (rp *responseParser) parseEmbeddedStruct(gofile *ast.File, response *spec.Response, expr ast.Expr, seenPreviously map[string]struct{}) error { - switch tpe := expr.(type) { - case *ast.Ident: - // do lookup of type - // take primitives into account, they should result in an error for swagger - pkg, err := rp.scp.packageForFile(gofile, tpe) - if err != nil { - return fmt.Errorf("embedded struct: %v", err) - } - file, _, ts, err := findSourceFile(pkg, tpe.Name) - if err != nil { - return fmt.Errorf("embedded struct: %v", err) - } - if st, ok := ts.Type.(*ast.StructType); ok { - return rp.parseStructType(file, response, st, seenPreviously) - } - case *ast.SelectorExpr: - // look up package, file and then type - pkg, err := rp.scp.packageForSelector(gofile, tpe.X) - if err != nil { - return fmt.Errorf("embedded struct: %v", err) - } - file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name) - if err != nil { - return fmt.Errorf("embedded struct: %v", err) - } - if st, ok := ts.Type.(*ast.StructType); ok { - return rp.parseStructType(file, response, st, seenPreviously) - } - case *ast.StarExpr: - return rp.parseEmbeddedStruct(gofile, response, tpe.X, seenPreviously) - } - fmt.Printf("1%#v\n", expr) - return fmt.Errorf("unable to resolve embedded struct for: %v", expr) -} - -func (rp *responseParser) parseStructType(gofile *ast.File, response *spec.Response, tpe *ast.StructType, seenPreviously map[string]struct{}) error { - if tpe.Fields != nil { - - seenProperties := seenPreviously - - for _, fld := range tpe.Fields.List { - if len(fld.Names) == 0 { - // when the embedded struct is annotated with swagger:allOf it will be used as allOf property - // otherwise the fields will just be included as normal properties - if err := rp.parseEmbeddedStruct(gofile, response, fld.Type, seenProperties); err != 
nil { - return err - } - } - } - - for _, fld := range tpe.Fields.List { - if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() { - nm, ignore, _, err := parseJSONTag(fld) - if err != nil { - return err - } - if ignore { - continue - } - - var in string - // scan for param location first, this changes some behavior down the line - if fld.Doc != nil { - for _, cmt := range fld.Doc.List { - for _, line := range strings.Split(cmt.Text, "\n") { - matches := rxIn.FindStringSubmatch(line) - if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 { - in = strings.TrimSpace(matches[1]) - } - } - } - } - - ps := response.Headers[nm] - - // support swagger:file for response - // An API operation can return a file, such as an image or PDF. In this case, - // define the response schema with type: file and specify the appropriate MIME types in the produces section. - if fld.Doc != nil && fileParam(fld.Doc) { - response.Schema = &spec.Schema{} - response.Schema.Typed("file", "") - } else if err := rp.scp.parseNamedType(gofile, fld.Type, responseTypable{in, &ps, response}); err != nil { - return err - } - - if strfmtName, ok := strfmtName(fld.Doc); ok { - ps.Typed("string", strfmtName) - } - - sp := new(sectionedParser) - sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) } - sp.taggers = []tagParser{ - newSingleLineTagParser("maximum", &setMaximum{headerValidations{&ps}, rxf(rxMaximumFmt, "")}), - newSingleLineTagParser("minimum", &setMinimum{headerValidations{&ps}, rxf(rxMinimumFmt, "")}), - newSingleLineTagParser("multipleOf", &setMultipleOf{headerValidations{&ps}, rxf(rxMultipleOfFmt, "")}), - newSingleLineTagParser("minLength", &setMinLength{headerValidations{&ps}, rxf(rxMinLengthFmt, "")}), - newSingleLineTagParser("maxLength", &setMaxLength{headerValidations{&ps}, rxf(rxMaxLengthFmt, "")}), - newSingleLineTagParser("pattern", &setPattern{headerValidations{&ps}, rxf(rxPatternFmt, "")}), - newSingleLineTagParser("collectionFormat", &setCollectionFormat{headerValidations{&ps}, rxf(rxCollectionFormatFmt, "")}), - newSingleLineTagParser("minItems", &setMinItems{headerValidations{&ps}, rxf(rxMinItemsFmt, "")}), - newSingleLineTagParser("maxItems", &setMaxItems{headerValidations{&ps}, rxf(rxMaxItemsFmt, "")}), - newSingleLineTagParser("unique", &setUnique{headerValidations{&ps}, rxf(rxUniqueFmt, "")}), - newSingleLineTagParser("enum", &setEnum{headerValidations{&ps}, rxf(rxEnumFmt, "")}), - newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxDefaultFmt, "")}), - newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxExampleFmt, "")}), - } - itemsTaggers := func(items *spec.Items, level int) []tagParser { - // the expression is 1-index based not 0-index - itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1) - - return []tagParser{ - newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), 
&setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}), - } - } - - var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) - parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) { - if items == nil { - return []tagParser{}, nil - } - switch iftpe := expr.(type) { - case *ast.ArrayType: - eleTaggers := itemsTaggers(items, level) - sp.taggers = append(eleTaggers, sp.taggers...) - otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1) - if err != nil { - return nil, err - } - return otherTaggers, nil - case *ast.Ident: - taggers := []tagParser{} - if iftpe.Obj == nil { - taggers = itemsTaggers(items, level) - } - otherTaggers, err := parseArrayTypes(expr, items.Items, level+1) - if err != nil { - return nil, err - } - return append(taggers, otherTaggers...), nil - case *ast.StarExpr: - otherTaggers, err := parseArrayTypes(iftpe.X, items, level) - if err != nil { - return nil, err - } - return otherTaggers, nil - default: - return nil, fmt.Errorf("unknown field type ele for %q", nm) - } - } - // check if this is a primitive, if so parse the validations from the - // doc comments of the slice declaration. - if ftped, ok := fld.Type.(*ast.ArrayType); ok { - taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0) - if err != nil { - return err - } - sp.taggers = append(taggers, sp.taggers...) 
- } - - if err := sp.Parse(fld.Doc); err != nil { - return err - } - - if in != "body" { - seenProperties[nm] = struct{}{} - if response.Headers == nil { - response.Headers = make(map[string]spec.Header) - } - response.Headers[nm] = ps - } - } - } - - for k := range response.Headers { - if _, ok := seenProperties[k]; !ok { - delete(response.Headers, k) - } - } - } - - return nil -} diff --git a/vendor/github.com/go-swagger/go-swagger/scan/route_params.go b/vendor/github.com/go-swagger/go-swagger/scan/route_params.go deleted file mode 100644 index 6dd17f6b4..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/route_params.go +++ /dev/null @@ -1,253 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -package scan - -import ( - "errors" - "strconv" - "strings" - - "github.com/go-openapi/spec" -) - -const ( - // ParamDescriptionKey indicates the tag used to define a parameter description in swagger:route - ParamDescriptionKey = "description" - // ParamNameKey indicates the tag used to define a parameter name in swagger:route - ParamNameKey = "name" - // ParamInKey indicates the tag used to define a parameter location in swagger:route - ParamInKey = "in" - // ParamRequiredKey indicates the tag used to declare whether a parameter is required in swagger:route - ParamRequiredKey = "required" - // ParamTypeKey indicates the tag used to define the parameter type in swagger:route - ParamTypeKey = "type" - // ParamAllowEmptyKey indicates the tag used to indicate whether a parameter allows empty values in swagger:route - ParamAllowEmptyKey = "allowempty" - - // SchemaMinKey indicates the tag used to indicate the minimum value allowed for this type in swagger:route - SchemaMinKey = "min" - // SchemaMaxKey indicates the tag used to indicate the maximum value allowed for this type in swagger:route - SchemaMaxKey = "max" - // SchemaEnumKey indicates the tag used to specify the allowed values for this type in swagger:route - SchemaEnumKey = "enum" - // SchemaFormatKey indicates the expected format for this field in swagger:route - SchemaFormatKey = "format" - // SchemaDefaultKey indicates the default value for this field in swagger:route - SchemaDefaultKey = "default" - // SchemaMinLenKey indicates the minimum length this field in swagger:route - SchemaMinLenKey = "minlength" - // SchemaMaxLenKey indicates the minimum length this field in swagger:route - SchemaMaxLenKey = "maxlength" - - // TypeArray is the identifier for an array type in swagger:route - TypeArray = "array" - // TypeNumber is the identifier for a number type in swagger:route - TypeNumber = "number" - // TypeInteger is the identifier for an integer type in swagger:route - TypeInteger = "integer" - // TypeBoolean is the identifier for a boolean type in swagger:route - TypeBoolean = "boolean" - // TypeBool is the identifier for a boolean type in swagger:route - TypeBool = "bool" - // TypeObject is the identifier for an object type in swagger:route - TypeObject = "object" - // TypeString is the identifier for a string type in swagger:route - TypeString = "string" -) - -var ( - validIn = []string{"path", "query", "header", "body", "form"} - basicTypes = []string{TypeInteger, TypeNumber, TypeString, TypeBoolean, TypeBool, TypeArray} -) - -func newSetParams(params []*spec.Parameter, setter func([]*spec.Parameter)) *setOpParams { - return &setOpParams{ - set: setter, - parameters: params, - } -} - -type setOpParams struct { - set func([]*spec.Parameter) - parameters []*spec.Parameter -} - -func (s *setOpParams) Matches(line string) bool { - 
return rxParameters.MatchString(line) -} - -func (s *setOpParams) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - - var current *spec.Parameter - var extraData map[string]string - - for _, line := range lines { - l := strings.TrimSpace(line) - - if strings.HasPrefix(l, "+") { - s.finalizeParam(current, extraData) - current = new(spec.Parameter) - extraData = make(map[string]string) - l = strings.TrimPrefix(l, "+") - } - - kv := strings.SplitN(l, ":", 2) - - if len(kv) <= 1 { - continue - } - - key := strings.ToLower(strings.TrimSpace(kv[0])) - value := strings.TrimSpace(kv[1]) - - if current == nil { - return errors.New("invalid route/operation schema provided") - } - - switch key { - case ParamDescriptionKey: - current.Description = value - case ParamNameKey: - current.Name = value - case ParamInKey: - v := strings.ToLower(value) - if contains(validIn, v) { - current.In = v - } - case ParamRequiredKey: - if v, err := strconv.ParseBool(value); err == nil { - current.Required = v - } - case ParamTypeKey: - if current.Schema == nil { - current.Schema = new(spec.Schema) - } - if contains(basicTypes, value) { - current.Type = strings.ToLower(value) - if current.Type == TypeBool { - current.Type = TypeBoolean - } - } else { - if ref, err := spec.NewRef("#/definitions/" + value); err == nil { - current.Type = TypeObject - current.Schema.Ref = ref - } - } - current.Schema.Type = spec.StringOrArray{current.Type} - case ParamAllowEmptyKey: - if v, err := strconv.ParseBool(value); err == nil { - current.AllowEmptyValue = v - } - default: - extraData[key] = value - } - } - - s.finalizeParam(current, extraData) - s.set(s.parameters) - return nil -} - -func (s *setOpParams) finalizeParam(param *spec.Parameter, data map[string]string) { - if param == nil { - return - } - - processSchema(data, param) - s.parameters = append(s.parameters, param) -} - -func processSchema(data map[string]string, param *spec.Parameter) { - if param.Schema == nil { - return - } - - var enumValues []string - - for key, value := range data { - switch key { - case SchemaMinKey: - if t := getType(param.Schema); t == TypeNumber || t == TypeInteger { - v, _ := strconv.ParseFloat(value, 64) - param.Schema.Minimum = &v - } - case SchemaMaxKey: - if t := getType(param.Schema); t == TypeNumber || t == TypeInteger { - v, _ := strconv.ParseFloat(value, 64) - param.Schema.Maximum = &v - } - case SchemaMinLenKey: - if getType(param.Schema) == TypeArray { - v, _ := strconv.ParseInt(value, 10, 64) - param.Schema.MinLength = &v - } - case SchemaMaxLenKey: - if getType(param.Schema) == TypeArray { - v, _ := strconv.ParseInt(value, 10, 64) - param.Schema.MaxLength = &v - } - case SchemaEnumKey: - enumValues = strings.Split(value, ",") - case SchemaFormatKey: - param.Schema.Format = value - case SchemaDefaultKey: - param.Schema.Default = convert(param.Type, value) - } - } - - if param.Description != "" { - param.Schema.Description = param.Description - } - - convertEnum(param.Schema, enumValues) -} - -func convertEnum(schema *spec.Schema, enumValues []string) { - if len(enumValues) == 0 { - return - } - - var finalEnum []interface{} - for _, v := range enumValues { - finalEnum = append(finalEnum, convert(schema.Type[0], strings.TrimSpace(v))) - } - schema.Enum = finalEnum -} - -func convert(typeStr, valueStr string) interface{} { - switch typeStr { - case TypeInteger: - fallthrough - case TypeNumber: - if num, err := strconv.ParseFloat(valueStr, 64); err == nil { - return num - } 
- case TypeBoolean: - fallthrough - case TypeBool: - if b, err := strconv.ParseBool(valueStr); err == nil { - return b - } - } - return valueStr -} - -func getType(schema *spec.Schema) string { - if len(schema.Type) == 0 { - return "" - } - return schema.Type[0] -} - -func contains(arr []string, obj string) bool { - for _, v := range arr { - if v == obj { - return true - } - } - return false -} diff --git a/vendor/github.com/go-swagger/go-swagger/scan/routes.go b/vendor/github.com/go-swagger/go-swagger/scan/routes.go deleted file mode 100644 index 644d61900..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/routes.go +++ /dev/null @@ -1,146 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package scan - -import ( - "fmt" - "go/ast" - - "github.com/go-openapi/spec" - - "golang.org/x/tools/go/loader" -) - -func opConsumesSetter(op *spec.Operation) func([]string) { - return func(consumes []string) { op.Consumes = consumes } -} - -func opProducesSetter(op *spec.Operation) func([]string) { - return func(produces []string) { op.Produces = produces } -} - -func opSchemeSetter(op *spec.Operation) func([]string) { - return func(schemes []string) { op.Schemes = schemes } -} - -func opSecurityDefsSetter(op *spec.Operation) func([]map[string][]string) { - return func(securityDefs []map[string][]string) { op.Security = securityDefs } -} - -func opResponsesSetter(op *spec.Operation) func(*spec.Response, map[int]spec.Response) { - return func(def *spec.Response, scr map[int]spec.Response) { - if op.Responses == nil { - op.Responses = new(spec.Responses) - } - op.Responses.Default = def - op.Responses.StatusCodeResponses = scr - } -} - -func opParamSetter(op *spec.Operation) func([]*spec.Parameter) { - return func(params []*spec.Parameter) { - for _, v := range params { - op.AddParam(v) - } - } -} - -func newRoutesParser(prog *loader.Program) *routesParser { - return &routesParser{ - program: prog, - } -} - -type routesParser struct { - program *loader.Program - definitions map[string]spec.Schema - operations map[string]*spec.Operation - responses map[string]spec.Response - parameters []*spec.Parameter -} - -var routeVendorExtensibleParser = vendorExtensibleParser{ - setExtensions: func(ext spec.Extensions, dest interface{}) { - dest.(*spec.Operation).Extensions = ext - }, -} - -func (rp *routesParser) Parse(gofile *ast.File, target interface{}, includeTags map[string]bool, excludeTags map[string]bool) error { - tgt := target.(*spec.Paths) - for _, comsec := range gofile.Comments { - content := parsePathAnnotation(rxRoute, comsec.List) - - if content.Method == "" { - continue // it's not, next! 
- } - - if !shouldAcceptTag(content.Tags, includeTags, excludeTags) { - if Debug { - fmt.Printf("route %s %s is ignored due to tag rules\n", content.Method, content.Path) - } - continue - } - - pthObj := tgt.Paths[content.Path] - op := setPathOperation( - content.Method, content.ID, - &pthObj, rp.operations[content.ID]) - - op.Tags = content.Tags - - sp := new(sectionedParser) - sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) } - sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) } - sr := newSetResponses(rp.definitions, rp.responses, opResponsesSetter(op)) - spa := newSetParams(rp.parameters, opParamSetter(op)) - sp.taggers = []tagParser{ - newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, opConsumesSetter(op)), false), - newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, opProducesSetter(op)), false), - newSingleLineTagParser("Schemes", newSetSchemes(opSchemeSetter(op))), - newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, opSecurityDefsSetter(op)), false), - newMultiLineTagParser("Parameters", spa, false), - newMultiLineTagParser("Responses", sr, false), - newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, routeVendorExtensibleParser.ParseInto(op)), true), - } - if err := sp.Parse(content.Remaining); err != nil { - return fmt.Errorf("operation (%s): %v", op.ID, err) - } - - if tgt.Paths == nil { - tgt.Paths = make(map[string]spec.PathItem) - } - tgt.Paths[content.Path] = pthObj - } - - return nil -} - -func shouldAcceptTag(tags []string, includeTags map[string]bool, excludeTags map[string]bool) bool { - for _, tag := range tags { - if len(includeTags) > 0 { - if includeTags[tag] { - return true - } - } else if len(excludeTags) > 0 { - if excludeTags[tag] { - return false - } - } - } - return len(includeTags) <= 0 -} diff --git a/vendor/github.com/go-swagger/go-swagger/scan/scanner.go b/vendor/github.com/go-swagger/go-swagger/scan/scanner.go deleted file mode 100644 index b07616735..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/scanner.go +++ /dev/null @@ -1,974 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package scan - -import ( - "encoding/json" - "errors" - "fmt" - "go/ast" - "go/build" - goparser "go/parser" - "go/types" - "log" - "os" - "regexp" - "strings" - - "github.com/go-openapi/loads/fmts" - "github.com/go-openapi/spec" - "github.com/go-openapi/swag" - "golang.org/x/tools/go/loader" - yaml "gopkg.in/yaml.v3" -) - -const ( - rxMethod = "(\\p{L}+)" - rxPath = "((?:/[\\p{L}\\p{N}\\p{Pd}\\p{Pc}{}\\-\\.\\?_~%!$&'()*+,;=:@/]*)+/?)" - rxOpTags = "(\\p{L}[\\p{L}\\p{N}\\p{Pd}\\.\\p{Pc}\\p{Zs}]+)" - rxOpID = "((?:\\p{L}[\\p{L}\\p{N}\\p{Pd}\\p{Pc}]+)+)" - - rxMaximumFmt = "%s[Mm]ax(?:imum)?\\p{Zs}*:\\p{Zs}*([\\<=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$" - rxMinimumFmt = "%s[Mm]in(?:imum)?\\p{Zs}*:\\p{Zs}*([\\>=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$" - rxMultipleOfFmt = "%s[Mm]ultiple\\p{Zs}*[Oo]f\\p{Zs}*:\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$" - - rxMaxLengthFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$" - rxMinLengthFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$" - rxPatternFmt = "%s[Pp]attern\\p{Zs}*:\\p{Zs}*(.*)$" - rxCollectionFormatFmt = "%s[Cc]ollection(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ff]ormat)\\p{Zs}*:\\p{Zs}*(.*)$" - rxEnumFmt = "%s[Ee]num\\p{Zs}*:\\p{Zs}*(.*)$" - rxDefaultFmt = "%s[Dd]efault\\p{Zs}*:\\p{Zs}*(.*)$" - rxExampleFmt = "%s[Ee]xample\\p{Zs}*:\\p{Zs}*(.*)$" - - rxMaxItemsFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$" - rxMinItemsFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$" - rxUniqueFmt = "%s[Uu]nique\\p{Zs}*:\\p{Zs}*(true|false)$" - - rxItemsPrefixFmt = "(?:[Ii]tems[\\.\\p{Zs}]*){%d}" -) - -var ( - rxSwaggerAnnotation = regexp.MustCompile(`swagger:([\p{L}\p{N}\p{Pd}\p{Pc}]+)`) - rxFileUpload = regexp.MustCompile(`swagger:file`) - rxStrFmt = regexp.MustCompile(`swagger:strfmt\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`) - rxAlias = regexp.MustCompile(`swagger:alias`) - rxName = regexp.MustCompile(`swagger:name\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)$`) - rxAllOf = regexp.MustCompile(`swagger:allOf\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)?$`) - rxModelOverride = regexp.MustCompile(`swagger:model\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`) - rxResponseOverride = regexp.MustCompile(`swagger:response\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`) - rxParametersOverride = regexp.MustCompile(`swagger:parameters\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\p{Zs}]+)$`) - rxEnum = regexp.MustCompile(`swagger:enum\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`) - rxIgnoreOverride = regexp.MustCompile(`swagger:ignore\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`) - rxDefault = regexp.MustCompile(`swagger:default\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`) - rxType = regexp.MustCompile(`swagger:type\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`) - rxRoute = regexp.MustCompile( - "swagger:route\\p{Zs}*" + - rxMethod + - "\\p{Zs}*" + - rxPath + - "(?:\\p{Zs}+" + - rxOpTags + - ")?\\p{Zs}+" + - rxOpID + "\\p{Zs}*$") - rxBeginYAMLSpec = regexp.MustCompile(`---\p{Zs}*$`) - rxUncommentHeaders = regexp.MustCompile(`^[\p{Zs}\t/\*-]*\|?`) - rxUncommentYAML = regexp.MustCompile(`^[\p{Zs}\t]*/*`) - rxOperation = regexp.MustCompile( - "swagger:operation\\p{Zs}*" + - rxMethod + - "\\p{Zs}*" + - rxPath + - "(?:\\p{Zs}+" + - rxOpTags + - ")?\\p{Zs}+" + - rxOpID + "\\p{Zs}*$") - - rxSpace = regexp.MustCompile(`\p{Zs}+`) - rxIndent = regexp.MustCompile(`\p{Zs}*/*\p{Zs}*[^\p{Zs}]`) - rxPunctuationEnd = regexp.MustCompile(`\p{Po}$`) - 
rxStripComments = regexp.MustCompile(`^[^\p{L}\p{N}\p{Pd}\p{Pc}\+]*`) - rxStripTitleComments = regexp.MustCompile(`^[^\p{L}]*[Pp]ackage\p{Zs}+[^\p{Zs}]+\p{Zs}*`) - rxAllowedExtensions = regexp.MustCompile(`^[Xx]-`) - - rxIn = regexp.MustCompile(`[Ii]n\p{Zs}*:\p{Zs}*(query|path|header|body|formData)$`) - rxRequired = regexp.MustCompile(`[Rr]equired\p{Zs}*:\p{Zs}*(true|false)$`) - rxDiscriminator = regexp.MustCompile(`[Dd]iscriminator\p{Zs}*:\p{Zs}*(true|false)$`) - rxReadOnly = regexp.MustCompile(`[Rr]ead(?:\p{Zs}*|[\p{Pd}\p{Pc}])?[Oo]nly\p{Zs}*:\p{Zs}*(true|false)$`) - rxConsumes = regexp.MustCompile(`[Cc]onsumes\p{Zs}*:`) - rxProduces = regexp.MustCompile(`[Pp]roduces\p{Zs}*:`) - rxSecuritySchemes = regexp.MustCompile(`[Ss]ecurity\p{Zs}*:`) - rxSecurity = regexp.MustCompile(`[Ss]ecurity\p{Zs}*[Dd]efinitions:`) - rxResponses = regexp.MustCompile(`[Rr]esponses\p{Zs}*:`) - rxParameters = regexp.MustCompile(`[Pp]arameters\p{Zs}*:`) - rxSchemes = regexp.MustCompile(`[Ss]chemes\p{Zs}*:\p{Zs}*((?:(?:https?|HTTPS?|wss?|WSS?)[\p{Zs},]*)+)$`) - rxVersion = regexp.MustCompile(`[Vv]ersion\p{Zs}*:\p{Zs}*(.+)$`) - rxHost = regexp.MustCompile(`[Hh]ost\p{Zs}*:\p{Zs}*(.+)$`) - rxBasePath = regexp.MustCompile(`[Bb]ase\p{Zs}*-*[Pp]ath\p{Zs}*:\p{Zs}*` + rxPath + "$") - rxLicense = regexp.MustCompile(`[Ll]icense\p{Zs}*:\p{Zs}*(.+)$`) - rxContact = regexp.MustCompile(`[Cc]ontact\p{Zs}*-?(?:[Ii]info\p{Zs}*)?:\p{Zs}*(.+)$`) - rxTOS = regexp.MustCompile(`[Tt](:?erms)?\p{Zs}*-?[Oo]f?\p{Zs}*-?[Ss](?:ervice)?\p{Zs}*:`) - rxExtensions = regexp.MustCompile(`[Ee]xtensions\p{Zs}*:`) - rxInfoExtensions = regexp.MustCompile(`[In]nfo\p{Zs}*[Ee]xtensions:`) - // currently unused: rxExample = regexp.MustCompile(`[Ex]ample\p{Zs}*:\p{Zs}*(.*)$`) -) - -// Many thanks go to https://github.com/yvasiyarov/swagger -// this is loosely based on that implementation but for swagger 2.0 - -func joinDropLast(lines []string) string { - l := len(lines) - lns := lines - if l > 0 && len(strings.TrimSpace(lines[l-1])) == 0 { - lns = lines[:l-1] - } - return strings.Join(lns, "\n") -} - -func removeEmptyLines(lines []string) (notEmpty []string) { - for _, l := range lines { - if len(strings.TrimSpace(l)) > 0 { - notEmpty = append(notEmpty, l) - } - } - return -} - -func rxf(rxp, ar string) *regexp.Regexp { - return regexp.MustCompile(fmt.Sprintf(rxp, ar)) -} - -// The Opts for the application scanner. -type Opts struct { - BasePath string - Input *spec.Swagger - ScanModels bool - BuildTags string - Include []string - Exclude []string - IncludeTags []string - ExcludeTags []string -} - -func safeConvert(str string) bool { - b, err := swag.ConvertBool(str) - if err != nil { - return false - } - return b -} - -// Debug is true when process is run with DEBUG=1 env var -var Debug = safeConvert(os.Getenv("DEBUG")) - -// Application scans the application and builds a swagger spec based on the information from the code files. -// When there are includes provided, only those files are considered for the initial discovery. -// Similarly the excludes will exclude an item from initial discovery through scanning for annotations. -// When something in the discovered items requires a type that is contained in the includes or excludes it will still be -// in the spec. 
-func Application(opts Opts) (*spec.Swagger, error) { - parser, err := newAppScanner(&opts) - - if err != nil { - return nil, err - } - return parser.Parse() -} - -// appScanner the global context for scanning a go application -// into a swagger specification -type appScanner struct { - loader *loader.Config - prog *loader.Program - classifier *programClassifier - discovered []schemaDecl - input *spec.Swagger - definitions map[string]spec.Schema - responses map[string]spec.Response - operations map[string]*spec.Operation - scanModels bool - includeTags map[string]bool - excludeTas map[string]bool - - // MainPackage the path to find the main class in - MainPackage string -} - -// newAppScanner creates a new api parser -func newAppScanner(opts *Opts) (*appScanner, error) { - if Debug { - log.Println("scanning packages discovered through entrypoint @ ", opts.BasePath) - } - var ldr loader.Config - ldr.ParserMode = goparser.ParseComments - ldr.Import(opts.BasePath) - if opts.BuildTags != "" { - ldr.Build = &build.Default - ldr.Build.BuildTags = strings.Split(opts.BuildTags, ",") - } - ldr.TypeChecker = types.Config{FakeImportC: true} - prog, err := ldr.Load() - if err != nil { - return nil, err - } - - var includes, excludes packageFilters - if len(opts.Include) > 0 { - for _, include := range opts.Include { - includes = append(includes, packageFilter{Name: include}) - } - } - if len(opts.Exclude) > 0 { - for _, exclude := range opts.Exclude { - excludes = append(excludes, packageFilter{Name: exclude}) - } - } - includeTags := make(map[string]bool) - for _, includeTag := range opts.IncludeTags { - includeTags[includeTag] = true - } - excludeTags := make(map[string]bool) - for _, excludeTag := range opts.ExcludeTags { - excludeTags[excludeTag] = true - } - - input := opts.Input - if input == nil { - input = new(spec.Swagger) - input.Swagger = "2.0" - } - - if input.Paths == nil { - input.Paths = new(spec.Paths) - } - if input.Definitions == nil { - input.Definitions = make(map[string]spec.Schema) - } - if input.Responses == nil { - input.Responses = make(map[string]spec.Response) - } - if input.Extensions == nil { - input.Extensions = make(spec.Extensions) - } - - return &appScanner{ - MainPackage: opts.BasePath, - prog: prog, - input: input, - loader: &ldr, - operations: collectOperationsFromInput(input), - definitions: input.Definitions, - responses: input.Responses, - scanModels: opts.ScanModels, - classifier: &programClassifier{ - Includes: includes, - Excludes: excludes, - }, - includeTags: includeTags, - excludeTas: excludeTags, - }, nil -} - -func collectOperationsFromInput(input *spec.Swagger) map[string]*spec.Operation { - operations := make(map[string]*spec.Operation) - if input != nil && input.Paths != nil { - for _, pth := range input.Paths.Paths { - if pth.Get != nil { - operations[pth.Get.ID] = pth.Get - } - if pth.Post != nil { - operations[pth.Post.ID] = pth.Post - } - if pth.Put != nil { - operations[pth.Put.ID] = pth.Put - } - if pth.Patch != nil { - operations[pth.Patch.ID] = pth.Patch - } - if pth.Delete != nil { - operations[pth.Delete.ID] = pth.Delete - } - if pth.Head != nil { - operations[pth.Head.ID] = pth.Head - } - if pth.Options != nil { - operations[pth.Options.ID] = pth.Options - } - } - } - return operations -} - -// Parse produces a swagger object for an application -func (a *appScanner) Parse() (*spec.Swagger, error) { - // classification still includes files that are completely commented out - cp, err := a.classifier.Classify(a.prog) - if err != nil { - return 
nil, err - } - - // build models dictionary - if a.scanModels { - for _, modelsFile := range cp.Models { - if err := a.parseSchema(modelsFile); err != nil { - return nil, err - } - } - } - - // build parameters dictionary - for _, paramsFile := range cp.Parameters { - if err := a.parseParameters(paramsFile); err != nil { - return nil, err - } - } - - // build responses dictionary - for _, responseFile := range cp.Responses { - if err := a.parseResponses(responseFile); err != nil { - return nil, err - } - } - - // build definitions dictionary - if err := a.processDiscovered(); err != nil { - return nil, err - } - - // build paths dictionary - for _, routeFile := range cp.Routes { - if err := a.parseRoutes(routeFile); err != nil { - return nil, err - } - } - for _, operationFile := range cp.Operations { - if err := a.parseOperations(operationFile); err != nil { - return nil, err - } - } - - // build swagger object - for _, metaFile := range cp.Meta { - if err := a.parseMeta(metaFile); err != nil { - return nil, err - } - } - - if a.input.Swagger == "" { - a.input.Swagger = "2.0" - } - - return a.input, nil -} - -func (a *appScanner) processDiscovered() error { - // loop over discovered until all the items are in definitions - keepGoing := len(a.discovered) > 0 - for keepGoing { - var queue []schemaDecl - for _, d := range a.discovered { - if _, ok := a.definitions[d.Name]; !ok { - queue = append(queue, d) - } - } - a.discovered = nil - for _, sd := range queue { - if err := a.parseDiscoveredSchema(sd); err != nil { - return err - } - } - keepGoing = len(a.discovered) > 0 - } - - return nil -} - -func (a *appScanner) parseSchema(file *ast.File) error { - sp := newSchemaParser(a.prog) - if err := sp.Parse(file, a.definitions); err != nil { - return err - } - a.discovered = append(a.discovered, sp.postDecls...) - return nil -} - -func (a *appScanner) parseDiscoveredSchema(sd schemaDecl) error { - sp := newSchemaParser(a.prog) - sp.discovered = &sd - - if err := sp.Parse(sd.File, a.definitions); err != nil { - return err - } - a.discovered = append(a.discovered, sp.postDecls...) - return nil -} - -func (a *appScanner) parseRoutes(file *ast.File) error { - rp := newRoutesParser(a.prog) - rp.operations = a.operations - rp.definitions = a.definitions - rp.responses = a.responses - - return rp.Parse(file, a.input.Paths, a.includeTags, a.excludeTas) -} - -func (a *appScanner) parseOperations(file *ast.File) error { - op := newOperationsParser(a.prog) - op.operations = a.operations - op.definitions = a.definitions - op.responses = a.responses - return op.Parse(file, a.input.Paths, a.includeTags, a.excludeTas) -} - -func (a *appScanner) parseParameters(file *ast.File) error { - rp := newParameterParser(a.prog) - if err := rp.Parse(file, a.operations); err != nil { - return err - } - a.discovered = append(a.discovered, rp.postDecls...) - a.discovered = append(a.discovered, rp.scp.postDecls...) - return nil -} - -func (a *appScanner) parseResponses(file *ast.File) error { - rp := newResponseParser(a.prog) - if err := rp.Parse(file, a.responses); err != nil { - return err - } - a.discovered = append(a.discovered, rp.postDecls...) - a.discovered = append(a.discovered, rp.scp.postDecls...) 
- return nil -} - -func (a *appScanner) parseMeta(file *ast.File) error { - return newMetaParser(a.input).Parse(file.Doc) -} - -// MustExpandPackagePath gets the real package path on disk -func (a *appScanner) MustExpandPackagePath(packagePath string) string { - pkgRealpath := swag.FindInGoSearchPath(packagePath) - if pkgRealpath == "" { - log.Fatalf("Can't find package %s \n", packagePath) - } - - return pkgRealpath -} - -type swaggerTypable interface { - Typed(string, string) - SetRef(spec.Ref) - Items() swaggerTypable - WithEnum(...interface{}) - Schema() *spec.Schema - Level() int -} - -// Map all Go builtin types that have Json representation to Swagger/Json types. -// See https://golang.org/pkg/builtin/ and http://swagger.io/specification/ -func swaggerSchemaForType(typeName string, prop swaggerTypable) error { - switch typeName { - case "bool": - prop.Typed("boolean", "") - case "byte": - prop.Typed("integer", "uint8") - case "complex128", "complex64": - return fmt.Errorf("unsupported builtin %q (no JSON marshaller)", typeName) - case "error": - // TODO: error is often marshalled into a string but not always (e.g. errors package creates - // errors that are marshalled into an empty object), this could be handled the same way - // custom JSON marshallers are handled (in future) - prop.Typed("string", "") - case "float32": - prop.Typed("number", "float") - case "float64": - prop.Typed("number", "double") - case "int": - prop.Typed("integer", "int64") - case "int16": - prop.Typed("integer", "int16") - case "int32": - prop.Typed("integer", "int32") - case "int64": - prop.Typed("integer", "int64") - case "int8": - prop.Typed("integer", "int8") - case "rune": - prop.Typed("integer", "int32") - case "string": - prop.Typed("string", "") - case "uint": - prop.Typed("integer", "uint64") - case "uint16": - prop.Typed("integer", "uint16") - case "uint32": - prop.Typed("integer", "uint32") - case "uint64": - prop.Typed("integer", "uint64") - case "uint8": - prop.Typed("integer", "uint8") - case "uintptr": - prop.Typed("integer", "uint64") - default: - return fmt.Errorf("unsupported type %q", typeName) - } - return nil -} - -func newMultiLineTagParser(name string, parser valueParser, skipCleanUp bool) tagParser { - return tagParser{ - Name: name, - MultiLine: true, - SkipCleanUp: skipCleanUp, - Parser: parser, - } -} - -func newSingleLineTagParser(name string, parser valueParser) tagParser { - return tagParser{ - Name: name, - MultiLine: false, - SkipCleanUp: false, - Parser: parser, - } -} - -type tagParser struct { - Name string - MultiLine bool - SkipCleanUp bool - Lines []string - Parser valueParser -} - -func (st *tagParser) Matches(line string) bool { - return st.Parser.Matches(line) -} - -func (st *tagParser) Parse(lines []string) error { - return st.Parser.Parse(lines) -} - -func newYamlParser(rx *regexp.Regexp, setter func(json.RawMessage) error) valueParser { - return &yamlParser{ - set: setter, - rx: rx, - } -} - -type yamlParser struct { - set func(json.RawMessage) error - rx *regexp.Regexp -} - -func (y *yamlParser) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - - var uncommented []string - uncommented = append(uncommented, removeYamlIndent(lines)...) 
- - yamlContent := strings.Join(uncommented, "\n") - var yamlValue interface{} - err := yaml.Unmarshal([]byte(yamlContent), &yamlValue) - if err != nil { - return err - } - - var jsonValue json.RawMessage - jsonValue, err = fmts.YAMLToJSON(yamlValue) - if err != nil { - return err - } - - return y.set(jsonValue) -} - -func (y *yamlParser) Matches(line string) bool { - return y.rx.MatchString(line) -} - -// aggregates lines in header until it sees `---`, -// the beginning of a YAML spec -type yamlSpecScanner struct { - header []string - yamlSpec []string - setTitle func([]string) - setDescription func([]string) - workedOutTitle bool - title []string - skipHeader bool -} - -func cleanupScannerLines(lines []string, ur *regexp.Regexp, yamlBlock *regexp.Regexp) []string { - // bail early when there is nothing to parse - if len(lines) == 0 { - return lines - } - seenLine := -1 - var lastContent int - var uncommented []string - var startBlock bool - var yaml []string - for i, v := range lines { - if yamlBlock != nil && yamlBlock.MatchString(v) && !startBlock { - startBlock = true - if seenLine < 0 { - seenLine = i - } - continue - } - if startBlock { - if yamlBlock.MatchString(v) { - startBlock = false - uncommented = append(uncommented, removeIndent(yaml)...) - continue - } - yaml = append(yaml, v) - if v != "" { - if seenLine < 0 { - seenLine = i - } - lastContent = i - } - continue - } - str := ur.ReplaceAllString(v, "") - uncommented = append(uncommented, str) - if str != "" { - if seenLine < 0 { - seenLine = i - } - lastContent = i - } - } - - // fixes issue #50 - if seenLine == -1 { - return nil - } - return uncommented[seenLine : lastContent+1] -} - -// a shared function that can be used to split given headers -// into a title and description -func collectScannerTitleDescription(headers []string) (title, desc []string) { - hdrs := cleanupScannerLines(headers, rxUncommentHeaders, nil) - - idx := -1 - for i, line := range hdrs { - if strings.TrimSpace(line) == "" { - idx = i - break - } - } - - if idx > -1 { - title = hdrs[:idx] - if len(hdrs) > idx+1 { - desc = hdrs[idx+1:] - } else { - desc = nil - } - return - } - - if len(hdrs) > 0 { - line := hdrs[0] - if rxPunctuationEnd.MatchString(line) { - title = []string{line} - desc = hdrs[1:] - } else { - desc = hdrs - } - } - - return -} - -func (sp *yamlSpecScanner) collectTitleDescription() { - if sp.workedOutTitle { - return - } - if sp.setTitle == nil { - sp.header = cleanupScannerLines(sp.header, rxUncommentHeaders, nil) - return - } - - sp.workedOutTitle = true - sp.title, sp.header = collectScannerTitleDescription(sp.header) -} - -func (sp *yamlSpecScanner) Title() []string { - sp.collectTitleDescription() - return sp.title -} - -func (sp *yamlSpecScanner) Description() []string { - sp.collectTitleDescription() - return sp.header -} - -func (sp *yamlSpecScanner) Parse(doc *ast.CommentGroup) error { - if doc == nil { - return nil - } - var startedYAMLSpec bool -COMMENTS: - for _, c := range doc.List { - for _, line := range strings.Split(c.Text, "\n") { - if rxSwaggerAnnotation.MatchString(line) { - break COMMENTS // a new swagger: annotation terminates this parser - } - - if !startedYAMLSpec { - if rxBeginYAMLSpec.MatchString(line) { - startedYAMLSpec = true - sp.yamlSpec = append(sp.yamlSpec, line) - continue - } - - if !sp.skipHeader { - sp.header = append(sp.header, line) - } - - // no YAML spec yet, moving on - continue - } - - sp.yamlSpec = append(sp.yamlSpec, line) - } - } - if sp.setTitle != nil { - sp.setTitle(sp.Title()) - } - 
if sp.setDescription != nil { - sp.setDescription(sp.Description()) - } - return nil -} - -func (sp *yamlSpecScanner) UnmarshalSpec(u func([]byte) error) (err error) { - spec := cleanupScannerLines(sp.yamlSpec, rxUncommentYAML, nil) - if len(spec) == 0 { - return errors.New("no spec available to unmarshal") - } - - if !strings.Contains(spec[0], "---") { - return errors.New("yaml spec has to start with `---`") - } - - // remove indentation - spec = removeIndent(spec) - - // 1. parse yaml lines - yamlValue := make(map[interface{}]interface{}) - - yamlContent := strings.Join(spec, "\n") - err = yaml.Unmarshal([]byte(yamlContent), &yamlValue) - if err != nil { - return - } - - // 2. convert to json - var jsonValue json.RawMessage - jsonValue, err = fmts.YAMLToJSON(yamlValue) - if err != nil { - return - } - - // 3. unmarshal the json into an interface - var data []byte - data, err = jsonValue.MarshalJSON() - if err != nil { - return - } - err = u(data) - if err != nil { - return - } - - // all parsed, returning... - sp.yamlSpec = nil // spec is now consumed, so let's erase the parsed lines - return -} - -// removes indent base on the first line -func removeIndent(spec []string) []string { - loc := rxIndent.FindStringIndex(spec[0]) - if loc[1] > 0 { - for i := range spec { - if len(spec[i]) >= loc[1] { - spec[i] = spec[i][loc[1]-1:] - } - } - } - return spec -} - -// removes indent base on the first line -func removeYamlIndent(spec []string) []string { - loc := rxIndent.FindStringIndex(spec[0]) - var s []string - if loc[1] > 0 { - for i := range spec { - if len(spec[i]) >= loc[1] { - s = append(s, spec[i][loc[1]-1:]) - } - } - } - return s -} - -// aggregates lines in header until it sees a tag. -type sectionedParser struct { - header []string - matched map[string]tagParser - annotation valueParser - - seenTag bool - skipHeader bool - setTitle func([]string) - setDescription func([]string) - workedOutTitle bool - taggers []tagParser - currentTagger *tagParser - title []string - ignored bool -} - -func (st *sectionedParser) collectTitleDescription() { - if st.workedOutTitle { - return - } - if st.setTitle == nil { - st.header = cleanupScannerLines(st.header, rxUncommentHeaders, nil) - return - } - - st.workedOutTitle = true - st.title, st.header = collectScannerTitleDescription(st.header) -} - -func (st *sectionedParser) Title() []string { - st.collectTitleDescription() - return st.title -} - -func (st *sectionedParser) Description() []string { - st.collectTitleDescription() - return st.header -} - -func (st *sectionedParser) Parse(doc *ast.CommentGroup) error { - if doc == nil { - return nil - } -COMMENTS: - for _, c := range doc.List { - for _, line := range strings.Split(c.Text, "\n") { - if rxSwaggerAnnotation.MatchString(line) { - if rxIgnoreOverride.MatchString(line) { - st.ignored = true - break COMMENTS // an explicit ignore terminates this parser - } - if st.annotation == nil || !st.annotation.Matches(line) { - break COMMENTS // a new swagger: annotation terminates this parser - } - - _ = st.annotation.Parse([]string{line}) - if len(st.header) > 0 { - st.seenTag = true - } - continue - } - - var matched bool - for _, tagger := range st.taggers { - if tagger.Matches(line) { - st.seenTag = true - st.currentTagger = &tagger - matched = true - break - } - } - - if st.currentTagger == nil { - if !st.skipHeader && !st.seenTag { - st.header = append(st.header, line) - } - // didn't match a tag, moving on - continue - } - - if st.currentTagger.MultiLine && matched { - // the first line of a 
multiline tagger doesn't count - continue - } - - ts, ok := st.matched[st.currentTagger.Name] - if !ok { - ts = *st.currentTagger - } - ts.Lines = append(ts.Lines, line) - if st.matched == nil { - st.matched = make(map[string]tagParser) - } - st.matched[st.currentTagger.Name] = ts - - if !st.currentTagger.MultiLine { - st.currentTagger = nil - } - } - } - if st.setTitle != nil { - st.setTitle(st.Title()) - } - if st.setDescription != nil { - st.setDescription(st.Description()) - } - for _, mt := range st.matched { - if !mt.SkipCleanUp { - mt.Lines = cleanupScannerLines(mt.Lines, rxUncommentHeaders, nil) - } - if err := mt.Parse(mt.Lines); err != nil { - return err - } - } - return nil -} - -type vendorExtensibleParser struct { - setExtensions func(ext spec.Extensions, dest interface{}) -} - -func (extParser vendorExtensibleParser) ParseInto(dest interface{}) func(json.RawMessage) error { - return func(jsonValue json.RawMessage) error { - var jsonData spec.Extensions - err := json.Unmarshal(jsonValue, &jsonData) - if err != nil { - return err - } - for k := range jsonData { - if !rxAllowedExtensions.MatchString(k) { - return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k) - } - } - extParser.setExtensions(jsonData, dest) - return nil - } -} diff --git a/vendor/github.com/go-swagger/go-swagger/scan/schema.go b/vendor/github.com/go-swagger/go-swagger/scan/schema.go deleted file mode 100644 index 37ce6cf25..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/schema.go +++ /dev/null @@ -1,1358 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package scan - -import ( - "fmt" - "go/ast" - "log" - "os" - "os/exec" - "path/filepath" - "reflect" - "strconv" - "strings" - - "golang.org/x/tools/go/loader" - - "github.com/go-openapi/spec" -) - -func addExtension(ve *spec.VendorExtensible, key string, value interface{}) { - if os.Getenv("SWAGGER_GENERATE_EXTENSION") == "false" { - return - } - - ve.AddExtension(key, value) -} - -type schemaTypable struct { - schema *spec.Schema - level int -} - -func (st schemaTypable) Typed(tpe, format string) { - st.schema.Typed(tpe, format) -} - -func (st schemaTypable) SetRef(ref spec.Ref) { - st.schema.Ref = ref -} - -func (st schemaTypable) Schema() *spec.Schema { - return st.schema -} - -func (st schemaTypable) Items() swaggerTypable { - if st.schema.Items == nil { - st.schema.Items = new(spec.SchemaOrArray) - } - if st.schema.Items.Schema == nil { - st.schema.Items.Schema = new(spec.Schema) - } - - st.schema.Typed("array", "") - return schemaTypable{st.schema.Items.Schema, st.level + 1} -} - -func (st schemaTypable) AdditionalProperties() swaggerTypable { - if st.schema.AdditionalProperties == nil { - st.schema.AdditionalProperties = new(spec.SchemaOrBool) - } - if st.schema.AdditionalProperties.Schema == nil { - st.schema.AdditionalProperties.Schema = new(spec.Schema) - } - - st.schema.Typed("object", "") - return schemaTypable{st.schema.AdditionalProperties.Schema, st.level + 1} -} - -func (st schemaTypable) Level() int { return st.level } - -func (st schemaTypable) WithEnum(values ...interface{}) { - st.schema.WithEnum(values...) -} - -type schemaValidations struct { - current *spec.Schema -} - -func (sv schemaValidations) SetMaximum(val float64, exclusive bool) { - sv.current.Maximum = &val - sv.current.ExclusiveMaximum = exclusive -} -func (sv schemaValidations) SetMinimum(val float64, exclusive bool) { - sv.current.Minimum = &val - sv.current.ExclusiveMinimum = exclusive -} -func (sv schemaValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val } -func (sv schemaValidations) SetMinItems(val int64) { sv.current.MinItems = &val } -func (sv schemaValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val } -func (sv schemaValidations) SetMinLength(val int64) { sv.current.MinLength = &val } -func (sv schemaValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val } -func (sv schemaValidations) SetPattern(val string) { sv.current.Pattern = val } -func (sv schemaValidations) SetUnique(val bool) { sv.current.UniqueItems = val } -func (sv schemaValidations) SetDefault(val interface{}) { sv.current.Default = val } -func (sv schemaValidations) SetExample(val interface{}) { sv.current.Example = val } -func (sv schemaValidations) SetEnum(val string) { - sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Format: sv.current.Format, Type: sv.current.Type[0]}) -} - -type schemaDecl struct { - File *ast.File - Decl *ast.GenDecl - TypeSpec *ast.TypeSpec - GoName string - Name string - annotated bool -} - -func newSchemaDecl(file *ast.File, decl *ast.GenDecl, ts *ast.TypeSpec) *schemaDecl { - sd := &schemaDecl{ - File: file, - Decl: decl, - TypeSpec: ts, - } - sd.inferNames() - return sd -} - -func (sd *schemaDecl) hasAnnotation() bool { - sd.inferNames() - return sd.annotated -} - -func (sd *schemaDecl) inferNames() (goName string, name string) { - if sd.GoName != "" { - goName, name = sd.GoName, sd.Name - return - } - goName = sd.TypeSpec.Name.Name - name = goName - if sd.Decl.Doc != nil { - DECLS: - for _, cmt := range sd.Decl.Doc.List { - for _, ln := range 
strings.Split(cmt.Text, "\n") { - matches := rxModelOverride.FindStringSubmatch(ln) - if len(matches) > 0 { - sd.annotated = true - } - if len(matches) > 1 && len(matches[1]) > 0 { - name = matches[1] - break DECLS - } - } - } - } - sd.GoName = goName - sd.Name = name - return -} - -type schemaParser struct { - program *loader.Program - postDecls []schemaDecl - known map[string]spec.Schema - discovered *schemaDecl -} - -func newSchemaParser(prog *loader.Program) *schemaParser { - scp := new(schemaParser) - scp.program = prog - scp.known = make(map[string]spec.Schema) - return scp -} - -func (scp *schemaParser) Parse(gofile *ast.File, target interface{}) error { - tgt := target.(map[string]spec.Schema) - for _, decl := range gofile.Decls { - gd, ok := decl.(*ast.GenDecl) - if !ok { - continue - } - for _, spc := range gd.Specs { - if ts, ok := spc.(*ast.TypeSpec); ok { - sd := newSchemaDecl(gofile, gd, ts) - if err := scp.parseDecl(tgt, sd); err != nil { - return err - } - } - } - } - return nil -} - -func (scp *schemaParser) parseDecl(definitions map[string]spec.Schema, decl *schemaDecl) error { - // check if there is a swagger:model tag that is followed by a word, - // this word is the type name for swagger - // the package and type are recorded in the extensions - // once type name is found convert it to a schema, by looking up the schema in the - // definitions dictionary that got passed into this parse method - - // if our schemaParser is parsing a discovered schemaDecl and it does not match - // the current schemaDecl we can skip parsing. - if scp.discovered != nil && scp.discovered.Name != decl.Name { - return nil - } - - decl.inferNames() - schema := definitions[decl.Name] - schPtr := &schema - - // analyze doc comment for the model - sp := new(sectionedParser) - sp.setTitle = func(lines []string) { schema.Title = joinDropLast(lines) } - sp.setDescription = func(lines []string) { schema.Description = joinDropLast(lines) } - if err := sp.Parse(decl.Decl.Doc); err != nil { - return err - } - - // if the type is marked to ignore, just return - if sp.ignored { - return nil - } - - // analyze struct body for fields etc - // each exported struct field: - // * gets a type mapped to a go primitive - // * perhaps gets a format - // * has to document the validations that apply for the type and the field - // * when the struct field points to a model it becomes a ref: #/definitions/ModelName - // * the first line of the comment is the title - // * the following lines are the description - switch tpe := decl.TypeSpec.Type.(type) { - case *ast.StructType: - if err := scp.parseStructType(decl.File, schPtr, tpe, make(map[string]string)); err != nil { - return err - } - case *ast.InterfaceType: - if err := scp.parseInterfaceType(decl.File, schPtr, tpe, make(map[string]string)); err != nil { - return err - } - case *ast.Ident: - prop := &schemaTypable{schPtr, 0} - if strfmtName, ok := strfmtName(decl.Decl.Doc); ok { - prop.Typed("string", strfmtName) - } else { - if err := scp.parseNamedType(decl.File, tpe, prop); err != nil { - return err - } - } - if enumName, ok := enumName(decl.Decl.Doc); ok { - var enumValues = getEnumValues(decl.File, enumName) - if len(enumValues) > 0 { - var typeName = reflect.TypeOf(enumValues[0]).String() - prop.WithEnum(enumValues...) 
- - err := swaggerSchemaForType(typeName, prop) - if err != nil { - return fmt.Errorf("file %s, error is: %v", decl.File.Name, err) - } - } - } - case *ast.SelectorExpr: - prop := &schemaTypable{schPtr, 0} - if strfmtName, ok := strfmtName(decl.Decl.Doc); ok { - prop.Typed("string", strfmtName) - } else { - if err := scp.parseNamedType(decl.File, tpe, prop); err != nil { - return err - } - } - - case *ast.ArrayType: - prop := &schemaTypable{schPtr, 0} - if strfmtName, ok := strfmtName(decl.Decl.Doc); ok { - prop.Items().Typed("string", strfmtName) - } else { - if err := scp.parseNamedType(decl.File, tpe, &schemaTypable{schPtr, 0}); err != nil { - return err - } - } - - case *ast.MapType: - prop := &schemaTypable{schPtr, 0} - if strfmtName, ok := strfmtName(decl.Decl.Doc); ok { - prop.AdditionalProperties().Typed("string", strfmtName) - } else { - if err := scp.parseNamedType(decl.File, tpe, &schemaTypable{schPtr, 0}); err != nil { - return err - } - } - default: - log.Printf("WARNING: Missing parser for a %T, skipping model: %s\n", tpe, decl.Name) - return nil - } - - if schPtr.Ref.String() == "" { - if decl.Name != decl.GoName { - addExtension(&schPtr.VendorExtensible, "x-go-name", decl.GoName) - } - for _, pkgInfo := range scp.program.AllPackages { - if pkgInfo.Importable { - for _, fil := range pkgInfo.Files { - if fil.Pos() == decl.File.Pos() { - addExtension(&schPtr.VendorExtensible, "x-go-package", pkgInfo.Pkg.Path()) - } - } - } - } - } - definitions[decl.Name] = schema - return nil -} - -func (scp *schemaParser) parseNamedType(gofile *ast.File, expr ast.Expr, prop swaggerTypable) error { - switch ftpe := expr.(type) { - case *ast.Ident: // simple value - pkg, err := scp.packageForFile(gofile, ftpe) - if err != nil { - return err - } - return scp.parseIdentProperty(pkg, ftpe, prop) - - case *ast.StarExpr: // pointer to something, optional by default - if err := scp.parseNamedType(gofile, ftpe.X, prop); err != nil { - return err - } - - case *ast.ArrayType: // slice type - if err := scp.parseNamedType(gofile, ftpe.Elt, prop.Items()); err != nil { - return err - } - - case *ast.StructType: - schema := prop.Schema() - if schema == nil { - return fmt.Errorf("items doesn't support embedded structs") - } - return scp.parseStructType(gofile, prop.Schema(), ftpe, make(map[string]string)) - - case *ast.SelectorExpr: - err := scp.typeForSelector(gofile, ftpe, prop) - return err - - case *ast.MapType: - // check if key is a string type, if not print a message - // and skip the map property. 
Only maps with string keys can go into additional properties - sch := prop.Schema() - if sch == nil { - return fmt.Errorf("items doesn't support maps") - } - if keyIdent, ok := ftpe.Key.(*ast.Ident); sch != nil && ok { - if keyIdent.Name == "string" { - if sch.AdditionalProperties == nil { - sch.AdditionalProperties = new(spec.SchemaOrBool) - } - sch.AdditionalProperties.Allows = false - if sch.AdditionalProperties.Schema == nil { - sch.AdditionalProperties.Schema = new(spec.Schema) - } - if err := scp.parseNamedType(gofile, ftpe.Value, schemaTypable{sch.AdditionalProperties.Schema, 0}); err != nil { - return err - } - sch.Typed("object", "") - } - } - - case *ast.InterfaceType: - prop.Schema().Typed("object", "") - default: - pos := "unknown file:unknown position" - if scp != nil { - if scp.program != nil { - if scp.program.Fset != nil { - pos = scp.program.Fset.Position(expr.Pos()).String() - } - } - } - return fmt.Errorf("expr (%s) is unsupported for a schema", pos) - } - return nil -} - -func (scp *schemaParser) parseEmbeddedType(gofile *ast.File, schema *spec.Schema, expr ast.Expr, seenPreviously map[string]string) error { - switch tpe := expr.(type) { - case *ast.Ident: - // do lookup of type - // take primitives into account, they should result in an error for swagger - pkg, err := scp.packageForFile(gofile, tpe) - if err != nil { - return err - } - file, _, ts, err := findSourceFile(pkg, tpe.Name) - if err != nil { - return err - } - - switch st := ts.Type.(type) { - case *ast.StructType: - return scp.parseStructType(file, schema, st, seenPreviously) - case *ast.InterfaceType: - return scp.parseInterfaceType(file, schema, st, seenPreviously) - default: - prop := &schemaTypable{schema, 0} - return scp.parseNamedType(gofile, st, prop) - } - - case *ast.SelectorExpr: - // look up package, file and then type - pkg, err := scp.packageForSelector(gofile, tpe.X) - if err != nil { - return fmt.Errorf("embedded struct: %v", err) - } - file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name) - if err != nil { - return fmt.Errorf("embedded struct: %v", err) - } - if st, ok := ts.Type.(*ast.StructType); ok { - return scp.parseStructType(file, schema, st, seenPreviously) - } - if st, ok := ts.Type.(*ast.InterfaceType); ok { - return scp.parseInterfaceType(file, schema, st, seenPreviously) - } - case *ast.StarExpr: - return scp.parseEmbeddedType(gofile, schema, tpe.X, seenPreviously) - default: - return fmt.Errorf( - "parseEmbeddedType: unsupported type %v at position %#v", - expr, - scp.program.Fset.Position(tpe.Pos()), - ) - } - return fmt.Errorf("unable to resolve embedded struct for: %v", expr) -} - -func (scp *schemaParser) parseAllOfMember(gofile *ast.File, schema *spec.Schema, expr ast.Expr, seenPreviously map[string]string) error { - // TODO: check if struct is annotated with swagger:model or known in the definitions otherwise - var pkg *loader.PackageInfo - var file *ast.File - var gd *ast.GenDecl - var ts *ast.TypeSpec - var err error - - switch tpe := expr.(type) { - case *ast.Ident: - // do lookup of type - // take primitives into account, they should result in an error for swagger - pkg, err = scp.packageForFile(gofile, tpe) - if err != nil { - return err - } - file, gd, ts, err = findSourceFile(pkg, tpe.Name) - if err != nil { - return err - } - - case *ast.SelectorExpr: - // look up package, file and then type - pkg, err = scp.packageForSelector(gofile, tpe.X) - if err != nil { - return fmt.Errorf("embedded struct: %v", err) - } - file, gd, ts, err = findSourceFile(pkg, tpe.Sel.Name) 
- if err != nil { - return fmt.Errorf("embedded struct: %v", err) - } - default: - return fmt.Errorf("unable to resolve allOf member for: %v", expr) - } - - sd := newSchemaDecl(file, gd, ts) - if sd.hasAnnotation() && pkg.String() != "time" && ts.Name.Name != "Time" { - ref, err := spec.NewRef("#/definitions/" + sd.Name) - if err != nil { - return err - } - schema.Ref = ref - scp.postDecls = append(scp.postDecls, *sd) - } else { - switch st := ts.Type.(type) { - case *ast.StructType: - return scp.parseStructType(file, schema, st, seenPreviously) - case *ast.InterfaceType: - return scp.parseInterfaceType(file, schema, st, seenPreviously) - } - } - - return nil -} -func (scp *schemaParser) parseInterfaceType(gofile *ast.File, bschema *spec.Schema, tpe *ast.InterfaceType, seenPreviously map[string]string) error { - if tpe.Methods == nil { - return nil - } - - // first check if this has embedded interfaces, if so make sure to refer to those by ref - // when they are decorated with an allOf annotation - // go over the method list again and this time collect the nullary methods and parse the comments - // as if they are properties on a struct - var schema *spec.Schema - seenProperties := seenPreviously - hasAllOf := false - - for _, fld := range tpe.Methods.List { - if len(fld.Names) == 0 { - // if this created an allOf property then we have to rejig the schema var - // because all the fields collected that aren't from embedded structs should go in - // their own proper schema - // first process embedded structs in order of embedding - if allOfMember(fld.Doc) { - hasAllOf = true - if schema == nil { - schema = new(spec.Schema) - } - var newSch spec.Schema - // when the embedded struct is annotated with swagger:allOf it will be used as allOf property - // otherwise the fields will just be included as normal properties - if err := scp.parseAllOfMember(gofile, &newSch, fld.Type, seenProperties); err != nil { - return err - } - - if fld.Doc != nil { - for _, cmt := range fld.Doc.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - matches := rxAllOf.FindStringSubmatch(ln) - ml := len(matches) - if ml > 1 { - mv := matches[ml-1] - if mv != "" { - addExtension(&bschema.VendorExtensible, "x-class", mv) - } - } - } - } - } - - bschema.AllOf = append(bschema.AllOf, newSch) - continue - } - - var newSch spec.Schema - // when the embedded struct is annotated with swagger:allOf it will be used as allOf property - // otherwise the fields will just be included as normal properties - if err := scp.parseEmbeddedType(gofile, &newSch, fld.Type, seenProperties); err != nil { - return err - } - bschema.AllOf = append(bschema.AllOf, newSch) - hasAllOf = true - } - } - - if schema == nil { - schema = bschema - } - // then add and possibly override values - if schema.Properties == nil { - schema.Properties = make(map[string]spec.Schema) - } - schema.Typed("object", "") - for _, fld := range tpe.Methods.List { - if mtpe, ok := fld.Type.(*ast.FuncType); ok && mtpe.Params.NumFields() == 0 && mtpe.Results.NumFields() == 1 { - gnm := fld.Names[0].Name - nm := gnm - if fld.Doc != nil { - for _, cmt := range fld.Doc.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - matches := rxName.FindStringSubmatch(ln) - ml := len(matches) - if ml > 1 { - nm = matches[ml-1] - } - } - } - } - - ps := schema.Properties[nm] - if err := parseProperty(scp, gofile, mtpe.Results.List[0].Type, schemaTypable{&ps, 0}); err != nil { - return err - } - - if err := scp.createParser(nm, schema, &ps, fld).Parse(fld.Doc); err != nil 
{ - return err - } - - if ps.Ref.String() == "" && nm != gnm { - addExtension(&ps.VendorExtensible, "x-go-name", gnm) - } - seenProperties[nm] = gnm - schema.Properties[nm] = ps - } - - } - if schema != nil && hasAllOf && len(schema.Properties) > 0 { - bschema.AllOf = append(bschema.AllOf, *schema) - } - for k := range schema.Properties { - if _, ok := seenProperties[k]; !ok { - delete(schema.Properties, k) - } - } - return nil -} - -func (scp *schemaParser) parseStructType(gofile *ast.File, bschema *spec.Schema, tpe *ast.StructType, seenPreviously map[string]string) error { - if tpe.Fields == nil { - return nil - } - var schema *spec.Schema - seenProperties := seenPreviously - hasAllOf := false - - for _, fld := range tpe.Fields.List { - if len(fld.Names) == 0 { - // if the field is annotated with swagger:ignore, ignore it - if ignored(fld.Doc) { - continue - } - - _, ignore, _, err := parseJSONTag(fld) - if err != nil { - return err - } - if ignore { - continue - } - - // if this created an allOf property then we have to rejig the schema var - // because all the fields collected that aren't from embedded structs should go in - // their own proper schema - // first process embedded structs in order of embedding - if allOfMember(fld.Doc) { - hasAllOf = true - if schema == nil { - schema = new(spec.Schema) - } - var newSch spec.Schema - // when the embedded struct is annotated with swagger:allOf it will be used as allOf property - // otherwise the fields will just be included as normal properties - if err := scp.parseAllOfMember(gofile, &newSch, fld.Type, seenProperties); err != nil { - return err - } - - if fld.Doc != nil { - for _, cmt := range fld.Doc.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - matches := rxAllOf.FindStringSubmatch(ln) - ml := len(matches) - if ml > 1 { - mv := matches[ml-1] - if mv != "" { - addExtension(&bschema.VendorExtensible, "x-class", mv) - } - } - } - } - } - - bschema.AllOf = append(bschema.AllOf, newSch) - continue - } - if schema == nil { - schema = bschema - } - - // when the embedded struct is annotated with swagger:allOf it will be used as allOf property - // otherwise the fields will just be included as normal properties - if err := scp.parseEmbeddedType(gofile, schema, fld.Type, seenProperties); err != nil { - return err - } - } - } - if schema == nil { - schema = bschema - } - - // then add and possibly override values - if schema.Properties == nil { - schema.Properties = make(map[string]spec.Schema) - } - schema.Typed("object", "") - for _, fld := range tpe.Fields.List { - if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() { - // if the field is annotated with swagger:ignore, ignore it - if ignored(fld.Doc) { - continue - } - - gnm := fld.Names[0].Name - nm, ignore, isString, err := parseJSONTag(fld) - if err != nil { - return err - } - if ignore { - for seenTagName, seenFieldName := range seenPreviously { - if seenFieldName == gnm { - delete(schema.Properties, seenTagName) - break - } - } - continue - } - - ps := schema.Properties[nm] - if err := parseProperty(scp, gofile, fld.Type, schemaTypable{&ps, 0}); err != nil { - return err - } - if isString { - ps.Typed("string", ps.Format) - ps.Ref = spec.Ref{} - } - if strfmtName, ok := strfmtName(fld.Doc); ok { - ps.Typed("string", strfmtName) - ps.Ref = spec.Ref{} - } - - if err := scp.createParser(nm, schema, &ps, fld).Parse(fld.Doc); err != nil { - return err - } - - if ps.Ref.String() == "" && nm != gnm { - addExtension(&ps.VendorExtensible, "x-go-name", gnm) - 
} - // we have 2 cases: - // 1. field with different name override tag - // 2. field with different name removes tag - // so we need to save both tag&name - seenProperties[nm] = gnm - schema.Properties[nm] = ps - } - } - if schema != nil && hasAllOf && len(schema.Properties) > 0 { - bschema.AllOf = append(bschema.AllOf, *schema) - } - for k := range schema.Properties { - if _, ok := seenProperties[k]; !ok { - delete(schema.Properties, k) - } - } - return nil -} - -var schemaVendorExtensibleParser = vendorExtensibleParser{ - setExtensions: func(ext spec.Extensions, dest interface{}) { - dest.(*spec.Schema).Extensions = ext - }, -} - -func (scp *schemaParser) createParser(nm string, schema, ps *spec.Schema, fld *ast.Field) *sectionedParser { - sp := new(sectionedParser) - - schemeType, err := ps.Type.MarshalJSON() - if err != nil { - return nil - } - - if ps.Ref.String() == "" { - sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) } - sp.taggers = []tagParser{ - newSingleLineTagParser("maximum", &setMaximum{schemaValidations{ps}, rxf(rxMaximumFmt, "")}), - newSingleLineTagParser("minimum", &setMinimum{schemaValidations{ps}, rxf(rxMinimumFmt, "")}), - newSingleLineTagParser("multipleOf", &setMultipleOf{schemaValidations{ps}, rxf(rxMultipleOfFmt, "")}), - newSingleLineTagParser("minLength", &setMinLength{schemaValidations{ps}, rxf(rxMinLengthFmt, "")}), - newSingleLineTagParser("maxLength", &setMaxLength{schemaValidations{ps}, rxf(rxMaxLengthFmt, "")}), - newSingleLineTagParser("pattern", &setPattern{schemaValidations{ps}, rxf(rxPatternFmt, "")}), - newSingleLineTagParser("minItems", &setMinItems{schemaValidations{ps}, rxf(rxMinItemsFmt, "")}), - newSingleLineTagParser("maxItems", &setMaxItems{schemaValidations{ps}, rxf(rxMaxItemsFmt, "")}), - newSingleLineTagParser("unique", &setUnique{schemaValidations{ps}, rxf(rxUniqueFmt, "")}), - newSingleLineTagParser("enum", &setEnum{schemaValidations{ps}, rxf(rxEnumFmt, "")}), - newSingleLineTagParser("default", &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxDefaultFmt, "")}), - newSingleLineTagParser("type", &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxDefaultFmt, "")}), - newSingleLineTagParser("example", &setExample{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxExampleFmt, "")}), - newSingleLineTagParser("required", &setRequiredSchema{schema, nm}), - newSingleLineTagParser("readOnly", &setReadOnlySchema{ps}), - newSingleLineTagParser("discriminator", &setDiscriminator{schema, nm}), - newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, schemaVendorExtensibleParser.ParseInto(ps)), true), - } - - itemsTaggers := func(items *spec.Schema, level int) []tagParser { - schemeType, err := items.Type.MarshalJSON() - if err != nil { - return nil - } - // the expression is 1-index based not 0-index - itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1) - return []tagParser{ - newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{schemaValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{schemaValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{schemaValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{schemaValidations{items}, rxf(rxMinLengthFmt, 
itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{schemaValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{schemaValidations{items}, rxf(rxPatternFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{schemaValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{schemaValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{schemaValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{schemaValidations{items}, rxf(rxEnumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{items}, rxf(rxExampleFmt, itemsPrefix)}), - } - } - - var parseArrayTypes func(expr ast.Expr, items *spec.SchemaOrArray, level int) ([]tagParser, error) - parseArrayTypes = func(expr ast.Expr, items *spec.SchemaOrArray, level int) ([]tagParser, error) { - if items == nil || items.Schema == nil { - return []tagParser{}, nil - } - switch iftpe := expr.(type) { - case *ast.ArrayType: - eleTaggers := itemsTaggers(items.Schema, level) - sp.taggers = append(eleTaggers, sp.taggers...) - otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Schema.Items, level+1) - if err != nil { - return nil, err - } - return otherTaggers, nil - case *ast.Ident: - taggers := []tagParser{} - if iftpe.Obj == nil { - taggers = itemsTaggers(items.Schema, level) - } - otherTaggers, err := parseArrayTypes(expr, items.Schema.Items, level+1) - if err != nil { - return nil, err - } - return append(taggers, otherTaggers...), nil - case *ast.StarExpr: - otherTaggers, err := parseArrayTypes(iftpe.X, items, level) - if err != nil { - return nil, err - } - return otherTaggers, nil - default: - return nil, fmt.Errorf("unknown field type ele for %q", nm) - } - } - // check if this is a primitive, if so parse the validations from the - // doc comments of the slice declaration. - if ftped, ok := fld.Type.(*ast.ArrayType); ok { - taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0) - if err != nil { - return sp - } - sp.taggers = append(taggers, sp.taggers...) - } - - } else { - sp.taggers = []tagParser{ - newSingleLineTagParser("required", &setRequiredSchema{schema, nm}), - } - } - return sp -} - -// hasFilePathPrefix reports whether the filesystem path s begins with the -// elements in prefix. 
-// -// taken from: https://github.com/golang/go/blob/c87520c5981ecdeaa99e7ba636a6088f900c0c75/src/cmd/go/internal/load/path.go#L60-L80 -func hasFilePathPrefix(s, prefix string) bool { - sv := strings.ToUpper(filepath.VolumeName(s)) - pv := strings.ToUpper(filepath.VolumeName(prefix)) - s = s[len(sv):] - prefix = prefix[len(pv):] - switch { - default: - return false - case sv != pv: - return false - case len(s) == len(prefix): - return s == prefix - case len(s) > len(prefix): - if prefix != "" && prefix[len(prefix)-1] == filepath.Separator { - return strings.HasPrefix(s, prefix) - } - return s[len(prefix)] == filepath.Separator && s[:len(prefix)] == prefix - } -} - -func goroot() string { - cmd := exec.Command("go", "env", "GOROOT") - out, err := cmd.Output() - if err != nil { - panic("Could not detect GOROOT") - } - return string(out) -} - -func (scp *schemaParser) packageForFile(gofile *ast.File, tpe *ast.Ident) (*loader.PackageInfo, error) { - fn := scp.program.Fset.File(gofile.Pos()).Name() - if Debug { - log.Println("trying for", fn, tpe.Name, tpe.String()) - } - fa, err := filepath.Abs(fn) - if err != nil { - return nil, err - } - if Debug { - log.Println("absolute path", fa) - } - var fgp string - gopath := os.Getenv("GOPATH") - if gopath == "" { - gopath = filepath.Join(os.Getenv("HOME"), "go") - } - for _, p := range append(filepath.SplitList(gopath), goroot()) { - pref := filepath.Join(p, "src") - if hasFilePathPrefix(fa, pref) { - fgp = filepath.Dir(strings.TrimPrefix(fa, pref))[1:] - break - } - } - if Debug { - log.Println("package in gopath", fgp) - } - for pkg, pkgInfo := range scp.program.AllPackages { - if Debug { - log.Println("inferring for", tpe.Name, "with", gofile.Name.Name, "at", pkg.Path(), "against", filepath.ToSlash(fgp)) - } - if pkg.Name() == gofile.Name.Name && filepath.ToSlash(fgp) == pkg.Path() { - return pkgInfo, nil - } - } - - return nil, fmt.Errorf("unable to determine package for %s", fn) -} - -func (scp *schemaParser) packageForSelector(gofile *ast.File, expr ast.Expr) (*loader.PackageInfo, error) { - - if pth, ok := expr.(*ast.Ident); ok { - // lookup import - var selPath string - for _, imp := range gofile.Imports { - pv, err := strconv.Unquote(imp.Path.Value) - if err != nil { - pv = imp.Path.Value - } - if imp.Name != nil { - if imp.Name.Name == pth.Name { - selPath = pv - break - } - } else { - pkg := scp.program.Package(pv) - if pkg != nil && pth.Name == pkg.Pkg.Name() { - selPath = pv - break - } else { - parts := strings.Split(pv, "/") - if len(parts) > 0 && parts[len(parts)-1] == pth.Name { - selPath = pv - break - } - } - } - } - // find actual struct - if selPath == "" { - return nil, fmt.Errorf("no import found for %s", pth.Name) - } - - pkg := scp.program.Package(selPath) - if pkg != nil { - return pkg, nil - } - // TODO: I must admit this made me cry, it's not even a great solution. 
- pkg = scp.program.Package("github.com/go-swagger/go-swagger/vendor/" + selPath) - if pkg != nil { - return pkg, nil - } - for _, info := range scp.program.AllPackages { - n := info.String() - path := "/vendor/" + selPath - if strings.HasSuffix(n, path) { - pkg = scp.program.Package(n) - return pkg, nil - } - } - } - return nil, fmt.Errorf("can't determine selector path from %v", expr) -} - -func (scp *schemaParser) makeRef(file *ast.File, pkg *loader.PackageInfo, gd *ast.GenDecl, ts *ast.TypeSpec, prop swaggerTypable) error { - sd := newSchemaDecl(file, gd, ts) - sd.inferNames() - // make an exception for time.Time because this is a well-known string format - if sd.Name == "Time" && pkg.String() == "time" { - return nil - } - ref, err := spec.NewRef("#/definitions/" + sd.Name) - if err != nil { - return err - } - prop.SetRef(ref) - scp.postDecls = append(scp.postDecls, *sd) - return nil -} - -func (scp *schemaParser) parseIdentProperty(pkg *loader.PackageInfo, expr *ast.Ident, prop swaggerTypable) error { - // before proceeding make an exception to time.Time because it is a well known string format - if pkg.String() == "time" && expr.String() == "Time" { - prop.Typed("string", "date-time") - return nil - } - - // find the file this selector points to - file, gd, ts, err := findSourceFile(pkg, expr.Name) - - if err != nil { - err := swaggerSchemaForType(expr.Name, prop) - if err != nil { - return fmt.Errorf("package %s, error is: %v", pkg.String(), err) - } - return nil - } - - if at, ok := ts.Type.(*ast.ArrayType); ok { - // the swagger spec defines strfmt base64 as []byte. - // in that case we don't actually want to turn it into an array - // but we want to turn it into a string - if _, ok := at.Elt.(*ast.Ident); ok { - if strfmtName, ok := strfmtName(gd.Doc); ok { - prop.Typed("string", strfmtName) - return nil - } - } - // this is a selector, so most likely not base64 - if strfmtName, ok := strfmtName(gd.Doc); ok { - prop.Items().Typed("string", strfmtName) - return nil - } - } - - // look at doc comments for swagger:strfmt [name] - // when found this is the format name, create a schema with that name - if strfmtName, ok := strfmtName(gd.Doc); ok { - prop.Typed("string", strfmtName) - return nil - } - - if enumName, ok := enumName(gd.Doc); ok { - var enumValues = getEnumValues(file, enumName) - if len(enumValues) > 0 { - prop.WithEnum(enumValues...) 
- var typeName = reflect.TypeOf(enumValues[0]).String() - err := swaggerSchemaForType(typeName, prop) - if err != nil { - return fmt.Errorf("file %s, error is: %v", file.Name, err) - } - } - } - - if defaultName, ok := defaultName(gd.Doc); ok { - log.Println(defaultName) - return nil - } - - if typeName, ok := typeName(gd.Doc); ok { - _ = swaggerSchemaForType(typeName, prop) - return nil - } - - if isAliasParam(prop) || aliasParam(gd.Doc) { - itype, ok := ts.Type.(*ast.Ident) - if ok { - err := swaggerSchemaForType(itype.Name, prop) - if err == nil { - return nil - } - } - } - switch tpe := ts.Type.(type) { - case *ast.ArrayType: - return scp.makeRef(file, pkg, gd, ts, prop) - case *ast.StructType: - return scp.makeRef(file, pkg, gd, ts, prop) - - case *ast.Ident: - return scp.makeRef(file, pkg, gd, ts, prop) - - case *ast.StarExpr: - return parseProperty(scp, file, tpe.X, prop) - - case *ast.SelectorExpr: - // return scp.refForSelector(file, gd, tpe, ts, prop) - return scp.makeRef(file, pkg, gd, ts, prop) - - case *ast.InterfaceType: - return scp.makeRef(file, pkg, gd, ts, prop) - - case *ast.MapType: - return scp.makeRef(file, pkg, gd, ts, prop) - - default: - err := swaggerSchemaForType(expr.Name, prop) - if err != nil { - return fmt.Errorf("package %s, error is: %v", pkg.String(), err) - } - return nil - } - -} - -func (scp *schemaParser) typeForSelector(gofile *ast.File, expr *ast.SelectorExpr, prop swaggerTypable) error { - pkg, err := scp.packageForSelector(gofile, expr.X) - if err != nil { - return err - } - - return scp.parseIdentProperty(pkg, expr.Sel, prop) -} - -func findSourceFile(pkg *loader.PackageInfo, typeName string) (*ast.File, *ast.GenDecl, *ast.TypeSpec, error) { - for _, file := range pkg.Files { - for _, decl := range file.Decls { - if gd, ok := decl.(*ast.GenDecl); ok { - for _, gs := range gd.Specs { - if ts, ok := gs.(*ast.TypeSpec); ok { - strfmtNme, isStrfmt := strfmtName(gd.Doc) - if (isStrfmt && strfmtNme == typeName) || ts.Name != nil && ts.Name.Name == typeName { - return file, gd, ts, nil - } - } - } - } - } - } - return nil, nil, nil, fmt.Errorf("unable to find %s in %s", typeName, pkg.String()) -} - -func allOfMember(comments *ast.CommentGroup) bool { - if comments != nil { - for _, cmt := range comments.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - if rxAllOf.MatchString(ln) { - return true - } - } - } - } - return false -} - -func fileParam(comments *ast.CommentGroup) bool { - if comments != nil { - for _, cmt := range comments.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - if rxFileUpload.MatchString(ln) { - return true - } - } - } - } - return false -} - -func strfmtName(comments *ast.CommentGroup) (string, bool) { - if comments != nil { - for _, cmt := range comments.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - matches := rxStrFmt.FindStringSubmatch(ln) - if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 { - return strings.TrimSpace(matches[1]), true - } - } - } - } - return "", false -} - -func ignored(comments *ast.CommentGroup) bool { - if comments != nil { - for _, cmt := range comments.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - if rxIgnoreOverride.MatchString(ln) { - return true - } - } - } - } - return false -} - -func enumName(comments *ast.CommentGroup) (string, bool) { - if comments != nil { - for _, cmt := range comments.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - matches := rxEnum.FindStringSubmatch(ln) - if len(matches) > 1 && 
len(strings.TrimSpace(matches[1])) > 0 { - return strings.TrimSpace(matches[1]), true - } - } - } - } - return "", false -} - -func aliasParam(comments *ast.CommentGroup) bool { - if comments != nil { - for _, cmt := range comments.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - if rxAlias.MatchString(ln) { - return true - } - } - } - } - return false -} - -func defaultName(comments *ast.CommentGroup) (string, bool) { - if comments != nil { - for _, cmt := range comments.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - matches := rxDefault.FindStringSubmatch(ln) - if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 { - return strings.TrimSpace(matches[1]), true - } - } - } - } - return "", false -} - -func typeName(comments *ast.CommentGroup) (string, bool) { - - var typ string - if comments != nil { - for _, cmt := range comments.List { - for _, ln := range strings.Split(cmt.Text, "\n") { - matches := rxType.FindStringSubmatch(ln) - if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 { - typ = strings.TrimSpace(matches[1]) - return typ, true - } - } - } - } - return "", false -} - -func parseProperty(scp *schemaParser, gofile *ast.File, fld ast.Expr, prop swaggerTypable) error { - switch ftpe := fld.(type) { - case *ast.Ident: // simple value - pkg, err := scp.packageForFile(gofile, ftpe) - if err != nil { - return err - } - return scp.parseIdentProperty(pkg, ftpe, prop) - - case *ast.StarExpr: // pointer to something, optional by default - if err := parseProperty(scp, gofile, ftpe.X, prop); err != nil { - return err - } - - case *ast.ArrayType: // slice type - if err := parseProperty(scp, gofile, ftpe.Elt, prop.Items()); err != nil { - return err - } - - case *ast.StructType: - schema := prop.Schema() - if schema == nil { - return fmt.Errorf("items doesn't support embedded structs") - } - return scp.parseStructType(gofile, prop.Schema(), ftpe, make(map[string]string)) - - case *ast.SelectorExpr: - err := scp.typeForSelector(gofile, ftpe, prop) - return err - - case *ast.MapType: - // check if key is a string type, if not print a message - // and skip the map property. 
Only maps with string keys can go into additional properties - sch := prop.Schema() - if sch == nil { - return fmt.Errorf("items doesn't support maps") - } - if keyIdent, ok := ftpe.Key.(*ast.Ident); sch != nil && ok { - if keyIdent.Name == "string" { - if sch.AdditionalProperties == nil { - sch.AdditionalProperties = new(spec.SchemaOrBool) - } - sch.AdditionalProperties.Allows = false - if sch.AdditionalProperties.Schema == nil { - sch.AdditionalProperties.Schema = new(spec.Schema) - } - if err := parseProperty(scp, gofile, ftpe.Value, schemaTypable{sch.AdditionalProperties.Schema, 0}); err != nil { - return err - } - sch.Typed("object", "") - } - } - - case *ast.InterfaceType: - prop.Schema().Typed("object", "") - default: - pos := "unknown file:unknown position" - if scp != nil { - if scp.program != nil { - if scp.program.Fset != nil { - pos = scp.program.Fset.Position(fld.Pos()).String() - } - } - } - return fmt.Errorf("Expr (%s) is unsupported for a schema", pos) - } - return nil -} - -func parseJSONTag(field *ast.Field) (name string, ignore bool, isString bool, err error) { - if len(field.Names) > 0 { - name = field.Names[0].Name - } - if field.Tag != nil && len(strings.TrimSpace(field.Tag.Value)) > 0 { - tv, err := strconv.Unquote(field.Tag.Value) - if err != nil { - return name, false, false, err - } - - if strings.TrimSpace(tv) != "" { - st := reflect.StructTag(tv) - jsonParts := strings.Split(st.Get("json"), ",") - jsonName := jsonParts[0] - - if len(jsonParts) > 1 && jsonParts[1] == "string" { - isString = isFieldStringable(field.Type) - } - - if jsonName == "-" { - return name, true, isString, nil - } else if jsonName != "" { - return jsonName, false, isString, nil - } - } - } - return name, false, false, nil -} - -// isFieldStringable check if the field type is a scalar. If the field type is -// *ast.StarExpr and is pointer type, check if it refers to a scalar. -// Otherwise, the ",string" directive doesn't apply. -func isFieldStringable(tpe ast.Expr) bool { - if ident, ok := tpe.(*ast.Ident); ok { - switch ident.Name { - case "int", "int8", "int16", "int32", "int64", - "uint", "uint8", "uint16", "uint32", "uint64", - "float64", "string", "bool": - return true - } - } else if starExpr, ok := tpe.(*ast.StarExpr); ok { - return isFieldStringable(starExpr.X) - } else { - return false - } - return false -} diff --git a/vendor/github.com/go-swagger/go-swagger/scan/validators.go b/vendor/github.com/go-swagger/go-swagger/scan/validators.go deleted file mode 100644 index 45caf8783..000000000 --- a/vendor/github.com/go-swagger/go-swagger/scan/validators.go +++ /dev/null @@ -1,829 +0,0 @@ -//go:build !go1.11 -// +build !go1.11 - -// Copyright 2015 go-swagger maintainers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package scan - -import ( - "encoding/json" - "fmt" - "regexp" - "strconv" - "strings" - - "github.com/go-openapi/spec" -) - -type validationBuilder interface { - SetMaximum(float64, bool) - SetMinimum(float64, bool) - SetMultipleOf(float64) - - SetMinItems(int64) - SetMaxItems(int64) - - SetMinLength(int64) - SetMaxLength(int64) - SetPattern(string) - - SetUnique(bool) - SetEnum(string) - SetDefault(interface{}) - SetExample(interface{}) -} - -type valueParser interface { - Parse([]string) error - Matches(string) bool -} - -type setMaximum struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMaximum) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 2 && len(matches[2]) > 0 { - max, err := strconv.ParseFloat(matches[2], 64) - if err != nil { - return err - } - sm.builder.SetMaximum(max, matches[1] == "<") - } - return nil -} - -func (sm *setMaximum) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -type setMinimum struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMinimum) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -func (sm *setMinimum) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 2 && len(matches[2]) > 0 { - min, err := strconv.ParseFloat(matches[2], 64) - if err != nil { - return err - } - sm.builder.SetMinimum(min, matches[1] == ">") - } - return nil -} - -type setMultipleOf struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMultipleOf) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -func (sm *setMultipleOf) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 2 && len(matches[1]) > 0 { - multipleOf, err := strconv.ParseFloat(matches[1], 64) - if err != nil { - return err - } - sm.builder.SetMultipleOf(multipleOf) - } - return nil -} - -type setMaxItems struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMaxItems) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -func (sm *setMaxItems) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - maxItems, err := strconv.ParseInt(matches[1], 10, 64) - if err != nil { - return err - } - sm.builder.SetMaxItems(maxItems) - } - return nil -} - -type setMinItems struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMinItems) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -func (sm *setMinItems) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - minItems, err := strconv.ParseInt(matches[1], 10, 64) - if err != nil { - return err - } - sm.builder.SetMinItems(minItems) - } - return nil -} - -type setMaxLength struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMaxLength) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := 
sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - maxLength, err := strconv.ParseInt(matches[1], 10, 64) - if err != nil { - return err - } - sm.builder.SetMaxLength(maxLength) - } - return nil -} - -func (sm *setMaxLength) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -type setMinLength struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMinLength) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - minLength, err := strconv.ParseInt(matches[1], 10, 64) - if err != nil { - return err - } - sm.builder.SetMinLength(minLength) - } - return nil -} - -func (sm *setMinLength) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -type setPattern struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setPattern) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - sm.builder.SetPattern(matches[1]) - } - return nil -} - -func (sm *setPattern) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -type setCollectionFormat struct { - builder operationValidationBuilder - rx *regexp.Regexp -} - -func (sm *setCollectionFormat) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - sm.builder.SetCollectionFormat(matches[1]) - } - return nil -} - -func (sm *setCollectionFormat) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -type setUnique struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (su *setUnique) Matches(line string) bool { - return su.rx.MatchString(line) -} - -func (su *setUnique) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := su.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - req, err := strconv.ParseBool(matches[1]) - if err != nil { - return err - } - su.builder.SetUnique(req) - } - return nil -} - -type setEnum struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (se *setEnum) Matches(line string) bool { - return se.rx.MatchString(line) -} - -func (se *setEnum) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := se.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - se.builder.SetEnum(matches[1]) - } - return nil -} - -func parseValueFromSchema(s string, schema *spec.SimpleSchema) (interface{}, error) { - if schema != nil { - switch strings.Trim(schema.TypeName(), "\"") { - case "integer", "int", "int64", "int32", "int16": - return strconv.Atoi(s) - case "bool", "boolean": - return strconv.ParseBool(s) - case "number", "float64", "float32": - return strconv.ParseFloat(s, 64) - case "object": - var obj map[string]interface{} - if err := json.Unmarshal([]byte(s), &obj); err != nil { - // If we can't parse it, just return the string. - return s, nil - } - return obj, nil - case "array": - var slice []interface{} - if err := json.Unmarshal([]byte(s), &slice); err != nil { - // If we can't parse it, just return the string. 
- return s, nil - } - return slice, nil - default: - return s, nil - } - } else { - return s, nil - } -} - -type setDefault struct { - scheme *spec.SimpleSchema - builder validationBuilder - rx *regexp.Regexp -} - -func (sd *setDefault) Matches(line string) bool { - return sd.rx.MatchString(line) -} - -func (sd *setDefault) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sd.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - d, err := parseValueFromSchema(matches[1], sd.scheme) - if err != nil { - return err - } - sd.builder.SetDefault(d) - } - return nil -} - -type setExample struct { - scheme *spec.SimpleSchema - builder validationBuilder - rx *regexp.Regexp -} - -func (se *setExample) Matches(line string) bool { - return se.rx.MatchString(line) -} - -func (se *setExample) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := se.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - d, err := parseValueFromSchema(matches[1], se.scheme) - if err != nil { - return err - } - se.builder.SetExample(d) - } - return nil -} - -type matchOnlyParam struct { - tgt *spec.Parameter - rx *regexp.Regexp -} - -func (mo *matchOnlyParam) Matches(line string) bool { - return mo.rx.MatchString(line) -} - -func (mo *matchOnlyParam) Parse(lines []string) error { - return nil -} - -type setRequiredParam struct { - tgt *spec.Parameter -} - -func (su *setRequiredParam) Matches(line string) bool { - return rxRequired.MatchString(line) -} - -func (su *setRequiredParam) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := rxRequired.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - req, err := strconv.ParseBool(matches[1]) - if err != nil { - return err - } - su.tgt.Required = req - } - return nil -} - -type setReadOnlySchema struct { - tgt *spec.Schema -} - -func (su *setReadOnlySchema) Matches(line string) bool { - return rxReadOnly.MatchString(line) -} - -func (su *setReadOnlySchema) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := rxReadOnly.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - req, err := strconv.ParseBool(matches[1]) - if err != nil { - return err - } - su.tgt.ReadOnly = req - } - return nil -} - -type setDiscriminator struct { - schema *spec.Schema - field string -} - -func (su *setDiscriminator) Matches(line string) bool { - return rxDiscriminator.MatchString(line) -} - -func (su *setDiscriminator) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := rxDiscriminator.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - req, err := strconv.ParseBool(matches[1]) - if err != nil { - return err - } - if req { - su.schema.Discriminator = su.field - } else { - if su.schema.Discriminator == su.field { - su.schema.Discriminator = "" - } - } - } - return nil -} - -type setRequiredSchema struct { - schema *spec.Schema - field string -} - -func (su *setRequiredSchema) Matches(line string) bool { - return rxRequired.MatchString(line) -} - -func (su *setRequiredSchema) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := 
rxRequired.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - req, err := strconv.ParseBool(matches[1]) - if err != nil { - return err - } - midx := -1 - for i, nm := range su.schema.Required { - if nm == su.field { - midx = i - break - } - } - if req { - if midx < 0 { - su.schema.Required = append(su.schema.Required, su.field) - } - } else if midx >= 0 { - su.schema.Required = append(su.schema.Required[:midx], su.schema.Required[midx+1:]...) - } - } - return nil -} - -func newMultilineDropEmptyParser(rx *regexp.Regexp, set func([]string)) *multiLineDropEmptyParser { - return &multiLineDropEmptyParser{ - rx: rx, - set: set, - } -} - -type multiLineDropEmptyParser struct { - set func([]string) - rx *regexp.Regexp -} - -func (m *multiLineDropEmptyParser) Matches(line string) bool { - return m.rx.MatchString(line) -} - -func (m *multiLineDropEmptyParser) Parse(lines []string) error { - m.set(removeEmptyLines(lines)) - return nil -} - -func newSetSchemes(set func([]string)) *setSchemes { - return &setSchemes{ - set: set, - rx: rxSchemes, - } -} - -type setSchemes struct { - set func([]string) - rx *regexp.Regexp -} - -func (ss *setSchemes) Matches(line string) bool { - return ss.rx.MatchString(line) -} - -func (ss *setSchemes) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := ss.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - sch := strings.Split(matches[1], ", ") - - var schemes []string - for _, s := range sch { - ts := strings.TrimSpace(s) - if ts != "" { - schemes = append(schemes, ts) - } - } - ss.set(schemes) - } - return nil -} - -func newSetSecurity(rx *regexp.Regexp, setter func([]map[string][]string)) *setSecurity { - return &setSecurity{ - set: setter, - rx: rx, - } -} - -type setSecurity struct { - set func([]map[string][]string) - rx *regexp.Regexp -} - -func (ss *setSecurity) Matches(line string) bool { - return ss.rx.MatchString(line) -} - -func (ss *setSecurity) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - - var result []map[string][]string - for _, line := range lines { - kv := strings.SplitN(line, ":", 2) - scopes := []string{} - var key string - - if len(kv) > 1 { - scs := strings.Split(kv[1], ",") - for _, scope := range scs { - tr := strings.TrimSpace(scope) - if tr != "" { - tr = strings.SplitAfter(tr, " ")[0] - scopes = append(scopes, strings.TrimSpace(tr)) - } - } - - key = strings.TrimSpace(kv[0]) - - result = append(result, map[string][]string{key: scopes}) - } - } - ss.set(result) - return nil -} - -func newSetResponses(definitions map[string]spec.Schema, responses map[string]spec.Response, setter func(*spec.Response, map[int]spec.Response)) *setOpResponses { - return &setOpResponses{ - set: setter, - rx: rxResponses, - definitions: definitions, - responses: responses, - } -} - -type setOpResponses struct { - set func(*spec.Response, map[int]spec.Response) - rx *regexp.Regexp - definitions map[string]spec.Schema - responses map[string]spec.Response -} - -func (ss *setOpResponses) Matches(line string) bool { - return ss.rx.MatchString(line) -} - -// ResponseTag used when specifying a response to point to a defined swagger:response -const ResponseTag = "response" - -// BodyTag used when specifying a response to point to a model/schema -const BodyTag = "body" - -// DescriptionTag used when specifying a response that gives a description of the response -const 
DescriptionTag = "description" - -func parseTags(line string) (modelOrResponse string, arrays int, isDefinitionRef bool, description string, err error) { - tags := strings.Split(line, " ") - parsedModelOrResponse := false - - for i, tagAndValue := range tags { - tagValList := strings.SplitN(tagAndValue, ":", 2) - var tag, value string - if len(tagValList) > 1 { - tag = tagValList[0] - value = tagValList[1] - } else { - //TODO: Print a warning, and in the long term, do not support not tagged values - //Add a default tag if none is supplied - if i == 0 { - tag = ResponseTag - } else { - tag = DescriptionTag - } - value = tagValList[0] - } - - foundModelOrResponse := false - if !parsedModelOrResponse { - if tag == BodyTag { - foundModelOrResponse = true - isDefinitionRef = true - } - if tag == ResponseTag { - foundModelOrResponse = true - isDefinitionRef = false - } - } - if foundModelOrResponse { - //Read the model or response tag - parsedModelOrResponse = true - //Check for nested arrays - arrays = 0 - for strings.HasPrefix(value, "[]") { - arrays++ - value = value[2:] - } - //What's left over is the model name - modelOrResponse = value - } else { - foundDescription := false - if tag == DescriptionTag { - foundDescription = true - } - if foundDescription { - //Descriptions are special, they make they read the rest of the line - descriptionWords := []string{value} - if i < len(tags)-1 { - descriptionWords = append(descriptionWords, tags[i+1:]...) - } - description = strings.Join(descriptionWords, " ") - break - } else { - if tag == ResponseTag || tag == BodyTag || tag == DescriptionTag { - err = fmt.Errorf("Found valid tag %s, but not in a valid position", tag) - } else { - err = fmt.Errorf("Found invalid tag: %s", tag) - } - //return error - return - } - } - } - - //TODO: Maybe do, if !parsedModelOrResponse {return some error} - return -} - -func (ss *setOpResponses) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - - var def *spec.Response - var scr map[int]spec.Response - - for _, line := range lines { - kv := strings.SplitN(line, ":", 2) - var key, value string - - if len(kv) > 1 { - key = strings.TrimSpace(kv[0]) - if key == "" { - // this must be some weird empty line - continue - } - value = strings.TrimSpace(kv[1]) - if value == "" { - var resp spec.Response - if strings.EqualFold("default", key) { - if def == nil { - def = &resp - } - } else { - if sc, err := strconv.Atoi(key); err == nil { - if scr == nil { - scr = make(map[int]spec.Response) - } - scr[sc] = resp - } - } - continue - } - refTarget, arrays, isDefinitionRef, description, err := parseTags(value) - if err != nil { - return err - } - //A possible exception for having a definition - if _, ok := ss.responses[refTarget]; !ok { - if _, ok := ss.definitions[refTarget]; ok { - isDefinitionRef = true - } - } - - var ref spec.Ref - if isDefinitionRef { - if description == "" { - description = refTarget - } - ref, err = spec.NewRef("#/definitions/" + refTarget) - } else { - ref, err = spec.NewRef("#/responses/" + refTarget) - } - if err != nil { - return err - } - - // description should used on anyway. 
- resp := spec.Response{ResponseProps: spec.ResponseProps{Description: description}} - - if isDefinitionRef { - resp.Schema = new(spec.Schema) - resp.Description = description - if arrays == 0 { - resp.Schema.Ref = ref - } else { - cs := resp.Schema - for i := 0; i < arrays; i++ { - cs.Typed("array", "") - cs.Items = new(spec.SchemaOrArray) - cs.Items.Schema = new(spec.Schema) - cs = cs.Items.Schema - } - cs.Ref = ref - } - // ref. could be empty while use description tag - } else if len(refTarget) > 0 { - resp.Ref = ref - } - - if strings.EqualFold("default", key) { - if def == nil { - def = &resp - } - } else { - if sc, err := strconv.Atoi(key); err == nil { - if scr == nil { - scr = make(map[int]spec.Response) - } - scr[sc] = resp - } - } - } - } - ss.set(def, scr) - return nil -} - -func parseEnum(val string, s *spec.SimpleSchema) []interface{} { - list := strings.Split(val, ",") - interfaceSlice := make([]interface{}, len(list)) - for i, d := range list { - v, err := parseValueFromSchema(d, s) - if err != nil { - interfaceSlice[i] = d - continue - } - - interfaceSlice[i] = v - } - return interfaceSlice -} diff --git a/vendor/github.com/gorilla/handlers/.editorconfig b/vendor/github.com/gorilla/handlers/.editorconfig new file mode 100644 index 000000000..c6b74c3e0 --- /dev/null +++ b/vendor/github.com/gorilla/handlers/.editorconfig @@ -0,0 +1,20 @@ +; https://editorconfig.org/ + +root = true + +[*] +insert_final_newline = true +charset = utf-8 +trim_trailing_whitespace = true +indent_style = space +indent_size = 2 + +[{Makefile,go.mod,go.sum,*.go,.gitmodules}] +indent_style = tab +indent_size = 4 + +[*.md] +indent_size = 4 +trim_trailing_whitespace = false + +eclint_indent_style = unset \ No newline at end of file diff --git a/vendor/github.com/gorilla/handlers/.gitignore b/vendor/github.com/gorilla/handlers/.gitignore new file mode 100644 index 000000000..577a89e81 --- /dev/null +++ b/vendor/github.com/gorilla/handlers/.gitignore @@ -0,0 +1,2 @@ +# Output of the go test coverage tool +coverage.coverprofile diff --git a/vendor/github.com/gorilla/handlers/LICENSE b/vendor/github.com/gorilla/handlers/LICENSE index 66ea3c8ae..bb9d80bc9 100644 --- a/vendor/github.com/gorilla/handlers/LICENSE +++ b/vendor/github.com/gorilla/handlers/LICENSE @@ -1,22 +1,27 @@ -Copyright (c) 2013 The Gorilla Handlers Authors. All rights reserved. +Copyright (c) 2023 The Gorilla Authors. All rights reserved. Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: +modification, are permitted provided that the following conditions are +met: - Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. - Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/gorilla/handlers/Makefile b/vendor/github.com/gorilla/handlers/Makefile new file mode 100644 index 000000000..003b784f7 --- /dev/null +++ b/vendor/github.com/gorilla/handlers/Makefile @@ -0,0 +1,34 @@ +GO_LINT=$(shell which golangci-lint 2> /dev/null || echo '') +GO_LINT_URI=github.com/golangci/golangci-lint/cmd/golangci-lint@latest + +GO_SEC=$(shell which gosec 2> /dev/null || echo '') +GO_SEC_URI=github.com/securego/gosec/v2/cmd/gosec@latest + +GO_VULNCHECK=$(shell which govulncheck 2> /dev/null || echo '') +GO_VULNCHECK_URI=golang.org/x/vuln/cmd/govulncheck@latest + +.PHONY: verify +verify: sec govulncheck lint test + +.PHONY: lint +lint: + $(if $(GO_LINT), ,go install $(GO_LINT_URI)) + @echo "##### Running golangci-lint #####" + golangci-lint run -v + +.PHONY: sec +sec: + $(if $(GO_SEC), ,go install $(GO_SEC_URI)) + @echo "##### Running gosec #####" + gosec ./... + +.PHONY: govulncheck +govulncheck: + $(if $(GO_VULNCHECK), ,go install $(GO_VULNCHECK_URI)) + @echo "##### Running govulncheck #####" + govulncheck ./... + +.PHONY: test +test: + @echo "##### Running tests #####" + go test -race -cover -coverprofile=coverage.coverprofile -covermode=atomic -v ./... 
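The gorilla/handlers changes in this patch (the LICENSE, Makefile, and source hunks around here) only touch style, lint fixes, and doc-comment formatting; the middleware API itself is unchanged. For orientation, here is a minimal sketch of how these handlers compose, assuming the standard gorilla/handlers API shown in the doc-comment examples further down. This code is illustrative only and is not part of the vendored diff:

```go
// Illustrative sketch only (not part of the vendored diff): composing the
// gorilla/handlers middleware touched by this bump, mirroring the examples
// in the updated doc comments (CORS, CombinedLoggingHandler, RecoveryHandler).
package main

import (
	"net/http"
	"os"

	"github.com/gorilla/handlers"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte("hello"))
	})

	// Innermost to outermost: CORS with library defaults, Apache combined
	// logging to stdout, and panic recovery wrapping everything.
	var h http.Handler = mux
	h = handlers.CORS()(h)
	h = handlers.CombinedLoggingHandler(os.Stdout, h)
	h = handlers.RecoveryHandler()(h)

	_ = http.ListenAndServe(":8000", h)
}
```

Each helper returns or wraps a plain `http.Handler`, so the wrappers compose as ordinary function application; recovery sits outermost here so panics raised inside the logging and CORS layers are also caught.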
diff --git a/vendor/github.com/gorilla/handlers/README.md b/vendor/github.com/gorilla/handlers/README.md index 6eba66bf3..02555b264 100644 --- a/vendor/github.com/gorilla/handlers/README.md +++ b/vendor/github.com/gorilla/handlers/README.md @@ -1,9 +1,9 @@ -gorilla/handlers -================ -[![GoDoc](https://godoc.org/github.com/gorilla/handlers?status.svg)](https://godoc.org/github.com/gorilla/handlers) -[![CircleCI](https://circleci.com/gh/gorilla/handlers.svg?style=svg)](https://circleci.com/gh/gorilla/handlers) -[![Sourcegraph](https://sourcegraph.com/github.com/gorilla/handlers/-/badge.svg)](https://sourcegraph.com/github.com/gorilla/handlers?badge) +# gorilla/handlers +![Testing](https://github.com/gorilla/handlers/actions/workflows/test.yml/badge.svg) +[![Codecov](https://codecov.io/github/gorilla/handlers/branch/main/graph/badge.svg)](https://codecov.io/github/gorilla/handlers) +[![GoDoc](https://godoc.org/github.com/gorilla/handlers?status.svg)](https://godoc.org/github.com/gorilla/handlers) +[![Sourcegraph](https://sourcegraph.com/github.com/gorilla/handlers/-/badge.svg)](https://sourcegraph.com/github.com/gorilla/handlers?badge) Package handlers is a collection of handlers (aka "HTTP middleware") for use with Go's `net/http` package (or any framework supporting `http.Handler`), including: diff --git a/vendor/github.com/gorilla/handlers/canonical.go b/vendor/github.com/gorilla/handlers/canonical.go index 8437fefc1..7121f5307 100644 --- a/vendor/github.com/gorilla/handlers/canonical.go +++ b/vendor/github.com/gorilla/handlers/canonical.go @@ -21,12 +21,11 @@ type canonical struct { // // Example: // -// r := mux.NewRouter() -// canonical := handlers.CanonicalHost("http://www.gorillatoolkit.org", 302) -// r.HandleFunc("/route", YourHandler) -// -// log.Fatal(http.ListenAndServe(":7000", canonical(r))) +// r := mux.NewRouter() +// canonical := handlers.CanonicalHost("http://www.gorillatoolkit.org", 302) +// r.HandleFunc("/route", YourHandler) // +// log.Fatal(http.ListenAndServe(":7000", canonical(r))) func CanonicalHost(domain string, code int) func(h http.Handler) http.Handler { fn := func(h http.Handler) http.Handler { return canonical{h, domain, code} diff --git a/vendor/github.com/gorilla/handlers/compress.go b/vendor/github.com/gorilla/handlers/compress.go index 1e95f1ccb..d6f589503 100644 --- a/vendor/github.com/gorilla/handlers/compress.go +++ b/vendor/github.com/gorilla/handlers/compress.go @@ -44,13 +44,13 @@ type flusher interface { Flush() error } -func (w *compressResponseWriter) Flush() { +func (cw *compressResponseWriter) Flush() { // Flush compressed data if compressor supports it. - if f, ok := w.compressor.(flusher); ok { - f.Flush() + if f, ok := cw.compressor.(flusher); ok { + _ = f.Flush() } // Flush HTTP response. 
- if f, ok := w.w.(http.Flusher); ok { + if f, ok := cw.w.(http.Flusher); ok { f.Flush() } } diff --git a/vendor/github.com/gorilla/handlers/cors.go b/vendor/github.com/gorilla/handlers/cors.go index 0dcdffb3d..8af9c096e 100644 --- a/vendor/github.com/gorilla/handlers/cors.go +++ b/vendor/github.com/gorilla/handlers/cors.go @@ -26,14 +26,14 @@ type cors struct { type OriginValidator func(string) bool var ( - defaultCorsOptionStatusCode = 200 - defaultCorsMethods = []string{"GET", "HEAD", "POST"} + defaultCorsOptionStatusCode = http.StatusOK + defaultCorsMethods = []string{http.MethodGet, http.MethodHead, http.MethodPost} defaultCorsHeaders = []string{"Accept", "Accept-Language", "Content-Language", "Origin"} - // (WebKit/Safari v9 sends the Origin header by default in AJAX requests) + // (WebKit/Safari v9 sends the Origin header by default in AJAX requests). ) const ( - corsOptionMethod string = "OPTIONS" + corsOptionMethod string = http.MethodOptions corsAllowOriginHeader string = "Access-Control-Allow-Origin" corsExposeHeadersHeader string = "Access-Control-Expose-Headers" corsMaxAgeHeader string = "Access-Control-Max-Age" @@ -101,10 +101,8 @@ func (ch *cors) ServeHTTP(w http.ResponseWriter, r *http.Request) { if !ch.isMatch(method, defaultCorsMethods) { w.Header().Set(corsAllowMethodsHeader, method) } - } else { - if len(ch.exposedHeaders) > 0 { - w.Header().Set(corsExposeHeadersHeader, strings.Join(ch.exposedHeaders, ",")) - } + } else if len(ch.exposedHeaders) > 0 { + w.Header().Set(corsExposeHeadersHeader, strings.Join(ch.exposedHeaders, ",")) } if ch.allowCredentials { @@ -141,22 +139,21 @@ func (ch *cors) ServeHTTP(w http.ResponseWriter, r *http.Request) { // CORS provides Cross-Origin Resource Sharing middleware. // Example: // -// import ( -// "net/http" +// import ( +// "net/http" // -// "github.com/gorilla/handlers" -// "github.com/gorilla/mux" -// ) +// "github.com/gorilla/handlers" +// "github.com/gorilla/mux" +// ) // -// func main() { -// r := mux.NewRouter() -// r.HandleFunc("/users", UserEndpoint) -// r.HandleFunc("/projects", ProjectEndpoint) -// -// // Apply the CORS middleware to our top-level router, with the defaults. -// http.ListenAndServe(":8000", handlers.CORS()(r)) -// } +// func main() { +// r := mux.NewRouter() +// r.HandleFunc("/users", UserEndpoint) +// r.HandleFunc("/projects", ProjectEndpoint) // +// // Apply the CORS middleware to our top-level router, with the defaults. +// http.ListenAndServe(":8000", handlers.CORS()(r)) +// } func CORS(opts ...CORSOption) func(http.Handler) http.Handler { return func(h http.Handler) http.Handler { ch := parseCORSOptions(opts...) @@ -174,7 +171,7 @@ func parseCORSOptions(opts ...CORSOption) *cors { } for _, option := range opts { - option(ch) + _ = option(ch) //TODO: @bharat-rajani, return error to caller if not nil? 
} return ch diff --git a/vendor/github.com/gorilla/handlers/handlers.go b/vendor/github.com/gorilla/handlers/handlers.go index 0509482ad..9b92fce33 100644 --- a/vendor/github.com/gorilla/handlers/handlers.go +++ b/vendor/github.com/gorilla/handlers/handlers.go @@ -35,7 +35,7 @@ func (h MethodHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { } sort.Strings(allow) w.Header().Set("Allow", strings.Join(allow, ", ")) - if req.Method == "OPTIONS" { + if req.Method == http.MethodOptions { w.WriteHeader(http.StatusOK) } else { http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) @@ -44,7 +44,7 @@ func (h MethodHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { } // responseLogger is wrapper of http.ResponseWriter that keeps track of its HTTP -// status code and body size +// status code and body size. type responseLogger struct { w http.ResponseWriter status int @@ -97,7 +97,7 @@ func isContentType(h http.Header, contentType string) bool { // Only PUT, POST, and PATCH requests are considered. func ContentTypeHandler(h http.Handler, contentTypes ...string) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if !(r.Method == "PUT" || r.Method == "POST" || r.Method == "PATCH") { + if !(r.Method == http.MethodPut || r.Method == http.MethodPost || r.Method == http.MethodPatch) { h.ServeHTTP(w, r) return } @@ -108,7 +108,10 @@ func ContentTypeHandler(h http.Handler, contentTypes ...string) http.Handler { return } } - http.Error(w, fmt.Sprintf("Unsupported content type %q; expected one of %q", r.Header.Get("Content-Type"), contentTypes), http.StatusUnsupportedMediaType) + http.Error(w, fmt.Sprintf("Unsupported content type %q; expected one of %q", + r.Header.Get("Content-Type"), + contentTypes), + http.StatusUnsupportedMediaType) }) } @@ -133,12 +136,12 @@ func ContentTypeHandler(h http.Handler, contentTypes ...string) http.Handler { // Form method takes precedence over header method. func HTTPMethodOverrideHandler(h http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if r.Method == "POST" { + if r.Method == http.MethodPost { om := r.FormValue(HTTPMethodOverrideFormKey) if om == "" { om = r.Header.Get(HTTPMethodOverrideHeader) } - if om == "PUT" || om == "PATCH" || om == "DELETE" { + if om == http.MethodPut || om == http.MethodPatch || om == http.MethodDelete { r.Method = om } } diff --git a/vendor/github.com/gorilla/handlers/logging.go b/vendor/github.com/gorilla/handlers/logging.go index 228465eba..2badb6fbf 100644 --- a/vendor/github.com/gorilla/handlers/logging.go +++ b/vendor/github.com/gorilla/handlers/logging.go @@ -18,7 +18,7 @@ // Logging -// LogFormatterParams is the structure any formatter will be handed when time to log comes +// LogFormatterParams is the structure any formatter will be handed when time to log comes. type LogFormatterParams struct { Request *http.Request URL url.URL @@ -27,7 +27,7 @@ type LogFormatterParams struct { Size int } -// LogFormatter gives the signature of the formatter function passed to CustomLoggingHandler +// LogFormatter gives the signature of the formatter function passed to CustomLoggingHandler. 
type LogFormatter func(writer io.Writer, params LogFormatterParams) // loggingHandler is the http.Handler implementation for LoggingHandlerTo and its @@ -46,7 +46,10 @@ func (h loggingHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { h.handler.ServeHTTP(w, req) if req.MultipartForm != nil { - req.MultipartForm.RemoveAll() + err := req.MultipartForm.RemoveAll() + if err != nil { + return + } } params := LogFormatterParams{ @@ -76,7 +79,7 @@ func makeLogger(w http.ResponseWriter) (*responseLogger, http.ResponseWriter) { func appendQuoted(buf []byte, s string) []byte { var runeTmp [utf8.UTFMax]byte - for width := 0; len(s) > 0; s = s[width:] { + for width := 0; len(s) > 0; s = s[width:] { //nolint: wastedassign //TODO: why width starts from 0and reassigned as 1 r := rune(s[0]) width = 1 if r >= utf8.RuneSelf { @@ -191,7 +194,7 @@ func buildCommonLogLine(req *http.Request, url url.URL, ts time.Time, status int func writeLog(writer io.Writer, params LogFormatterParams) { buf := buildCommonLogLine(params.Request, params.URL, params.TimeStamp, params.StatusCode, params.Size) buf = append(buf, '\n') - writer.Write(buf) + _, _ = writer.Write(buf) } // writeCombinedLog writes a log entry for req to w in Apache Combined Log Format. @@ -204,7 +207,7 @@ func writeCombinedLog(writer io.Writer, params LogFormatterParams) { buf = append(buf, `" "`...) buf = appendQuoted(buf, params.Request.UserAgent()) buf = append(buf, '"', '\n') - writer.Write(buf) + _, _ = writer.Write(buf) } // CombinedLoggingHandler return a http.Handler that wraps h and logs requests to out in @@ -212,7 +215,7 @@ func writeCombinedLog(writer io.Writer, params LogFormatterParams) { // // See http://httpd.apache.org/docs/2.2/logs.html#combined for a description of this format. // -// LoggingHandler always sets the ident field of the log to - +// LoggingHandler always sets the ident field of the log to -. func CombinedLoggingHandler(out io.Writer, h http.Handler) http.Handler { return loggingHandler{out, h, writeCombinedLog} } @@ -226,19 +229,18 @@ func CombinedLoggingHandler(out io.Writer, h http.Handler) http.Handler { // // Example: // -// r := mux.NewRouter() -// r.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { -// w.Write([]byte("This is a catch-all route")) -// }) -// loggedRouter := handlers.LoggingHandler(os.Stdout, r) -// http.ListenAndServe(":1123", loggedRouter) -// +// r := mux.NewRouter() +// r.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { +// w.Write([]byte("This is a catch-all route")) +// }) +// loggedRouter := handlers.LoggingHandler(os.Stdout, r) +// http.ListenAndServe(":1123", loggedRouter) func LoggingHandler(out io.Writer, h http.Handler) http.Handler { return loggingHandler{out, h, writeLog} } // CustomLoggingHandler provides a way to supply a custom log formatter -// while taking advantage of the mechanisms in this package +// while taking advantage of the mechanisms in this package. func CustomLoggingHandler(out io.Writer, h http.Handler, f LogFormatter) http.Handler { return loggingHandler{out, h, f} } diff --git a/vendor/github.com/gorilla/handlers/proxy_headers.go b/vendor/github.com/gorilla/handlers/proxy_headers.go index ed939dcef..281d753e9 100644 --- a/vendor/github.com/gorilla/handlers/proxy_headers.go +++ b/vendor/github.com/gorilla/handlers/proxy_headers.go @@ -18,7 +18,7 @@ var ( // RFC7239 defines a new "Forwarded: " header designed to replace the // existing use of X-Forwarded-* headers. - // e.g. 
Forwarded: for=192.0.2.60;proto=https;by=203.0.113.43 + // e.g. Forwarded: for=192.0.2.60;proto=https;by=203.0.113.43. forwarded = http.CanonicalHeaderKey("Forwarded") // Allows for a sub-match of the first value after 'for=' to the next // comma, semi-colon or space. The match is case-insensitive. @@ -67,7 +67,9 @@ func ProxyHeaders(h http.Handler) http.Handler { func getIP(r *http.Request) string { var addr string - if fwd := r.Header.Get(xForwardedFor); fwd != "" { + switch { + case r.Header.Get(xForwardedFor) != "": + fwd := r.Header.Get(xForwardedFor) // Only grab the first (client) address. Note that '192.168.0.1, // 10.1.1.1' is a valid key for X-Forwarded-For where addresses after // the first may represent forwarding proxies earlier in the chain. @@ -76,17 +78,15 @@ func getIP(r *http.Request) string { s = len(fwd) } addr = fwd[:s] - } else if fwd := r.Header.Get(xRealIP); fwd != "" { - // X-Real-IP should only contain one IP address (the client making the - // request). - addr = fwd - } else if fwd := r.Header.Get(forwarded); fwd != "" { + case r.Header.Get(xRealIP) != "": + addr = r.Header.Get(xRealIP) + case r.Header.Get(forwarded) != "": // match should contain at least two elements if the protocol was // specified in the Forwarded header. The first element will always be // the 'for=' capture, which we ignore. In the case of multiple IP // addresses (for=8.8.8.8, 8.8.4.4,172.16.1.20 is valid) we only // extract the first, which should be the client IP. - if match := forRegex.FindStringSubmatch(fwd); len(match) > 1 { + if match := forRegex.FindStringSubmatch(r.Header.Get(forwarded)); len(match) > 1 { // IPv6 addresses in Forwarded headers are quoted-strings. We strip // these quotes. addr = strings.Trim(match[1], `"`) diff --git a/vendor/github.com/gorilla/handlers/recovery.go b/vendor/github.com/gorilla/handlers/recovery.go index 4c4c1d9c6..0d4f955ec 100644 --- a/vendor/github.com/gorilla/handlers/recovery.go +++ b/vendor/github.com/gorilla/handlers/recovery.go @@ -36,12 +36,12 @@ func parseRecoveryOptions(h http.Handler, opts ...RecoveryOption) http.Handler { // // Example: // -// r := mux.NewRouter() -// r.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { -// panic("Unexpected error!") -// }) +// r := mux.NewRouter() +// r.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { +// panic("Unexpected error!") +// }) // -// http.ListenAndServe(":1123", handlers.RecoveryHandler()(r)) +// http.ListenAndServe(":1123", handlers.RecoveryHandler()(r)) func RecoveryHandler(opts ...RecoveryOption) func(h http.Handler) http.Handler { return func(h http.Handler) http.Handler { r := &recoveryHandler{handler: h} @@ -50,20 +50,22 @@ func RecoveryHandler(opts ...RecoveryOption) func(h http.Handler) http.Handler { } // RecoveryLogger is a functional option to override -// the default logger +// the default logger. func RecoveryLogger(logger RecoveryHandlerLogger) RecoveryOption { return func(h http.Handler) { - r := h.(*recoveryHandler) + r := h.(*recoveryHandler) //nolint:errcheck //TODO: + // @bharat-rajani should return type-assertion error but would break the API? r.logger = logger } } // PrintRecoveryStack is a functional option to enable // or disable printing stack traces on panic. 
-func PrintRecoveryStack(print bool) RecoveryOption { +func PrintRecoveryStack(shouldPrint bool) RecoveryOption { return func(h http.Handler) { - r := h.(*recoveryHandler) - r.printStack = print + r := h.(*recoveryHandler) //nolint:errcheck //TODO: + // @bharat-rajani should return type-assertion error but would break the API? + r.printStack = shouldPrint } } diff --git a/vendor/github.com/huandu/xstrings/convert.go b/vendor/github.com/huandu/xstrings/convert.go index 151c3151d..cba0d0725 100644 --- a/vendor/github.com/huandu/xstrings/convert.go +++ b/vendor/github.com/huandu/xstrings/convert.go @@ -12,11 +12,12 @@ // ToCamelCase is to convert words separated by space, underscore and hyphen to camel case. // // Some samples. -// "some_words" => "SomeWords" -// "http_server" => "HttpServer" -// "no_https" => "NoHttps" -// "_complex__case_" => "_Complex_Case_" -// "some words" => "SomeWords" +// +// "some_words" => "SomeWords" +// "http_server" => "HttpServer" +// "no_https" => "NoHttps" +// "_complex__case_" => "_Complex_Case_" +// "some words" => "SomeWords" func ToCamelCase(str string) string { if len(str) == 0 { return "" @@ -61,7 +62,6 @@ func ToCamelCase(str string) string { if isConnector(r1) { r0 = unicode.ToUpper(r0) } else { - r0 = unicode.ToLower(r0) buf.WriteRune(r1) } } @@ -74,16 +74,17 @@ func ToCamelCase(str string) string { // snake case format. // // Some samples. -// "FirstName" => "first_name" -// "HTTPServer" => "http_server" -// "NoHTTPS" => "no_https" -// "GO_PATH" => "go_path" -// "GO PATH" => "go_path" // space is converted to underscore. -// "GO-PATH" => "go_path" // hyphen is converted to underscore. -// "http2xx" => "http_2xx" // insert an underscore before a number and after an alphabet. -// "HTTP20xOK" => "http_20x_ok" -// "Duration2m3s" => "duration_2m3s" -// "Bld4Floor3rd" => "bld4_floor_3rd" +// +// "FirstName" => "first_name" +// "HTTPServer" => "http_server" +// "NoHTTPS" => "no_https" +// "GO_PATH" => "go_path" +// "GO PATH" => "go_path" // space is converted to underscore. +// "GO-PATH" => "go_path" // hyphen is converted to underscore. +// "http2xx" => "http_2xx" // insert an underscore before a number and after an alphabet. +// "HTTP20xOK" => "http_20x_ok" +// "Duration2m3s" => "duration_2m3s" +// "Bld4Floor3rd" => "bld4_floor_3rd" func ToSnakeCase(str string) string { return camelCaseToLowerCase(str, '_') } @@ -92,16 +93,17 @@ func ToSnakeCase(str string) string { // kebab case format. // // Some samples. -// "FirstName" => "first-name" -// "HTTPServer" => "http-server" -// "NoHTTPS" => "no-https" -// "GO_PATH" => "go-path" -// "GO PATH" => "go-path" // space is converted to '-'. -// "GO-PATH" => "go-path" // hyphen is converted to '-'. -// "http2xx" => "http-2xx" // insert an underscore before a number and after an alphabet. -// "HTTP20xOK" => "http-20x-ok" -// "Duration2m3s" => "duration-2m3s" -// "Bld4Floor3rd" => "bld4-floor-3rd" +// +// "FirstName" => "first-name" +// "HTTPServer" => "http-server" +// "NoHTTPS" => "no-https" +// "GO_PATH" => "go-path" +// "GO PATH" => "go-path" // space is converted to '-'. +// "GO-PATH" => "go-path" // hyphen is converted to '-'. +// "http2xx" => "http-2xx" // insert an underscore before a number and after an alphabet. 
+// "HTTP20xOK" => "http-20x-ok" +// "Duration2m3s" => "duration-2m3s" +// "Bld4Floor3rd" => "bld4-floor-3rd" func ToKebabCase(str string) string { return camelCaseToLowerCase(str, '-') } @@ -510,17 +512,18 @@ func ShuffleSource(str string, src rand.Source) string { // regardless whether the result is a valid rune or not. // // Only following characters are alphanumeric. -// * a - z -// * A - Z -// * 0 - 9 +// - a - z +// - A - Z +// - 0 - 9 // // Samples (borrowed from ruby's String#succ document): -// "abcd" => "abce" -// "THX1138" => "THX1139" -// "<>" => "<>" -// "1999zzz" => "2000aaa" -// "ZZZ9999" => "AAAA0000" -// "***" => "**+" +// +// "abcd" => "abce" +// "THX1138" => "THX1139" +// "<>" => "<>" +// "1999zzz" => "2000aaa" +// "ZZZ9999" => "AAAA0000" +// "***" => "**+" func Successor(str string) string { if str == "" { return str diff --git a/vendor/github.com/huandu/xstrings/format.go b/vendor/github.com/huandu/xstrings/format.go index 8cd76c525..b32219bbd 100644 --- a/vendor/github.com/huandu/xstrings/format.go +++ b/vendor/github.com/huandu/xstrings/format.go @@ -17,9 +17,10 @@ // If tabSize <= 0, ExpandTabs panics with error. // // Samples: -// ExpandTabs("a\tbc\tdef\tghij\tk", 4) => "a bc def ghij k" -// ExpandTabs("abcdefg\thij\nk\tl", 4) => "abcdefg hij\nk l" -// ExpandTabs("zไธญ\tๆ–‡\tw", 4) => "zไธญ ๆ–‡ w" +// +// ExpandTabs("a\tbc\tdef\tghij\tk", 4) => "a bc def ghij k" +// ExpandTabs("abcdefg\thij\nk\tl", 4) => "abcdefg hij\nk l" +// ExpandTabs("zไธญ\tๆ–‡\tw", 4) => "zไธญ ๆ–‡ w" func ExpandTabs(str string, tabSize int) string { if tabSize <= 0 { panic("tab size must be positive") @@ -74,9 +75,10 @@ func ExpandTabs(str string, tabSize int) string { // If pad is an empty string, str will be returned. // // Samples: -// LeftJustify("hello", 4, " ") => "hello" -// LeftJustify("hello", 10, " ") => "hello " -// LeftJustify("hello", 10, "123") => "hello12312" +// +// LeftJustify("hello", 4, " ") => "hello" +// LeftJustify("hello", 10, " ") => "hello " +// LeftJustify("hello", 10, "123") => "hello12312" func LeftJustify(str string, length int, pad string) string { l := Len(str) @@ -100,9 +102,10 @@ func LeftJustify(str string, length int, pad string) string { // If pad is an empty string, str will be returned. // // Samples: -// RightJustify("hello", 4, " ") => "hello" -// RightJustify("hello", 10, " ") => " hello" -// RightJustify("hello", 10, "123") => "12312hello" +// +// RightJustify("hello", 4, " ") => "hello" +// RightJustify("hello", 10, " ") => " hello" +// RightJustify("hello", 10, "123") => "12312hello" func RightJustify(str string, length int, pad string) string { l := Len(str) @@ -126,9 +129,10 @@ func RightJustify(str string, length int, pad string) string { // If pad is an empty string, str will be returned. // // Samples: -// Center("hello", 4, " ") => "hello" -// Center("hello", 10, " ") => " hello " -// Center("hello", 10, "123") => "12hello123" +// +// Center("hello", 4, " ") => "hello" +// Center("hello", 10, " ") => " hello " +// Center("hello", 10, "123") => "12hello123" func Center(str string, length int, pad string) string { l := Len(str) diff --git a/vendor/github.com/huandu/xstrings/manipulate.go b/vendor/github.com/huandu/xstrings/manipulate.go index 64075f9bb..ab42fe0fe 100644 --- a/vendor/github.com/huandu/xstrings/manipulate.go +++ b/vendor/github.com/huandu/xstrings/manipulate.go @@ -79,10 +79,12 @@ func Slice(str string, start, end int) string { // The return value is a slice of strings with head, match and tail. 
// // If str contains sep, for example "hello" and "l", Partition returns -// "he", "l", "lo" +// +// "he", "l", "lo" // // If str doesn't contain sep, for example "hello" and "x", Partition returns -// "hello", "", "" +// +// "hello", "", "" func Partition(str, sep string) (head, match, tail string) { index := strings.Index(str, sep) @@ -101,10 +103,12 @@ func Partition(str, sep string) (head, match, tail string) { // The return value is a slice of strings with head, match and tail. // // If str contains sep, for example "hello" and "l", LastPartition returns -// "hel", "l", "o" +// +// "hel", "l", "o" // // If str doesn't contain sep, for example "hello" and "x", LastPartition returns -// "", "", "hello" +// +// "", "", "hello" func LastPartition(str, sep string) (head, match, tail string) { index := strings.LastIndex(str, sep) diff --git a/vendor/github.com/huandu/xstrings/stringbuilder.go b/vendor/github.com/huandu/xstrings/stringbuilder.go index bb0919d32..06812fea0 100644 --- a/vendor/github.com/huandu/xstrings/stringbuilder.go +++ b/vendor/github.com/huandu/xstrings/stringbuilder.go @@ -1,4 +1,5 @@ -//+build go1.10 +//go:build go1.10 +// +build go1.10 package xstrings diff --git a/vendor/github.com/huandu/xstrings/stringbuilder_go110.go b/vendor/github.com/huandu/xstrings/stringbuilder_go110.go index dac389d13..ccaa5aedd 100644 --- a/vendor/github.com/huandu/xstrings/stringbuilder_go110.go +++ b/vendor/github.com/huandu/xstrings/stringbuilder_go110.go @@ -1,4 +1,5 @@ -//+build !go1.10 +//go:build !go1.10 +// +build !go1.10 package xstrings diff --git a/vendor/github.com/huandu/xstrings/translate.go b/vendor/github.com/huandu/xstrings/translate.go index 42e694fb1..1fac6a00b 100644 --- a/vendor/github.com/huandu/xstrings/translate.go +++ b/vendor/github.com/huandu/xstrings/translate.go @@ -416,14 +416,16 @@ func (tr *Translator) HasPattern() bool { // // From and to are patterns representing a set of characters. Pattern is defined as following. // -// * Special characters -// * '-' means a range of runes, e.g. -// * "a-z" means all characters from 'a' to 'z' inclusive; -// * "z-a" means all characters from 'z' to 'a' inclusive. -// * '^' as first character means a set of all runes excepted listed, e.g. -// * "^a-z" means all characters except 'a' to 'z' inclusive. -// * '\' escapes special characters. -// * Normal character represents itself, e.g. "abc" is a set including 'a', 'b' and 'c'. +// Special characters: +// +// 1. '-' means a range of runes, e.g. +// "a-z" means all characters from 'a' to 'z' inclusive; +// "z-a" means all characters from 'z' to 'a' inclusive. +// 2. '^' as first character means a set of all runes excepted listed, e.g. +// "^a-z" means all characters except 'a' to 'z' inclusive. +// 3. '\' escapes special characters. +// +// Normal character represents itself, e.g. "abc" is a set including 'a', 'b' and 'c'. // // Translate will try to find a 1:1 mapping from from to to. // If to is smaller than from, last rune in to will be used to map "out of range" characters in from. @@ -433,12 +435,13 @@ func (tr *Translator) HasPattern() bool { // If the to pattern is an empty string, Translate works exactly the same as Delete. 
// // Samples: -// Translate("hello", "aeiou", "12345") => "h2ll4" -// Translate("hello", "a-z", "A-Z") => "HELLO" -// Translate("hello", "z-a", "a-z") => "svool" -// Translate("hello", "aeiou", "*") => "h*ll*" -// Translate("hello", "^l", "*") => "**ll*" -// Translate("hello ^ world", `\^lo`, "*") => "he*** * w*r*d" +// +// Translate("hello", "aeiou", "12345") => "h2ll4" +// Translate("hello", "a-z", "A-Z") => "HELLO" +// Translate("hello", "z-a", "a-z") => "svool" +// Translate("hello", "aeiou", "*") => "h*ll*" +// Translate("hello", "^l", "*") => "**ll*" +// Translate("hello ^ world", `\^lo`, "*") => "he*** * w*r*d" func Translate(str, from, to string) string { tr := NewTranslator(from, to) return tr.Translate(str) @@ -448,9 +451,10 @@ func Translate(str, from, to string) string { // Pattern is defined in Translate function. // // Samples: -// Delete("hello", "aeiou") => "hll" -// Delete("hello", "a-k") => "llo" -// Delete("hello", "^a-k") => "he" +// +// Delete("hello", "aeiou") => "hll" +// Delete("hello", "a-k") => "llo" +// Delete("hello", "^a-k") => "he" func Delete(str, pattern string) string { tr := NewTranslator(pattern, "") return tr.Translate(str) @@ -460,9 +464,10 @@ func Delete(str, pattern string) string { // Pattern is defined in Translate function. // // Samples: -// Count("hello", "aeiou") => 3 -// Count("hello", "a-k") => 3 -// Count("hello", "^a-k") => 2 +// +// Count("hello", "aeiou") => 3 +// Count("hello", "a-k") => 3 +// Count("hello", "^a-k") => 2 func Count(str, pattern string) int { if pattern == "" || str == "" { return 0 @@ -491,9 +496,10 @@ func Count(str, pattern string) int { // If pattern is not empty, only runes matching the pattern will be squeezed. // // Samples: -// Squeeze("hello", "") => "helo" -// Squeeze("hello", "m-z") => "hello" -// Squeeze("hello world", " ") => "hello world" +// +// Squeeze("hello", "") => "helo" +// Squeeze("hello", "m-z") => "hello" +// Squeeze("hello world", " ") => "hello world" func Squeeze(str, pattern string) string { var last, r rune var size int diff --git a/vendor/github.com/imdario/mergo/CONTRIBUTING.md b/vendor/github.com/imdario/mergo/CONTRIBUTING.md new file mode 100644 index 000000000..0a1ff9f94 --- /dev/null +++ b/vendor/github.com/imdario/mergo/CONTRIBUTING.md @@ -0,0 +1,112 @@ + +# Contributing to mergo + +First off, thanks for taking the time to contribute! ❤️ + +All types of contributions are encouraged and valued. See the [Table of Contents](#table-of-contents) for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉 + +> And if you like the project, but just don't have time to contribute, that's fine.
There are other easy ways to support the project and show your appreciation, which we would also be very happy about: +> - Star the project +> - Tweet about it +> - Refer this project in your project's readme +> - Mention the project at local meetups and tell your friends/colleagues + + +## Table of Contents + +- [Code of Conduct](#code-of-conduct) +- [I Have a Question](#i-have-a-question) +- [I Want To Contribute](#i-want-to-contribute) +- [Reporting Bugs](#reporting-bugs) +- [Suggesting Enhancements](#suggesting-enhancements) + +## Code of Conduct + +This project and everyone participating in it is governed by the +[mergo Code of Conduct](https://github.com/imdario/mergoblob/master/CODE_OF_CONDUCT.md). +By participating, you are expected to uphold this code. Please report unacceptable behavior +to <>. + + +## I Have a Question + +> If you want to ask a question, we assume that you have read the available [Documentation](https://pkg.go.dev/github.com/imdario/mergo). + +Before you ask a question, it is best to search for existing [Issues](https://github.com/imdario/mergo/issues) that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue. It is also advisable to search the internet for answers first. + +If you then still feel the need to ask a question and need clarification, we recommend the following: + +- Open an [Issue](https://github.com/imdario/mergo/issues/new). +- Provide as much context as you can about what you're running into. +- Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. + +We will then take care of the issue as soon as possible. + +## I Want To Contribute + +> ### Legal Notice +> When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license. + +### Reporting Bugs + + +#### Before Submitting a Bug Report + +A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible. + +- Make sure that you are using the latest version. +- Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the [documentation](). If you are looking for support, you might want to check [this section](#i-have-a-question)). +- To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the [bug tracker](https://github.com/imdario/mergoissues?q=label%3Abug). +- Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue. +- Collect information about the bug: +- Stack trace (Traceback) +- OS, Platform and Version (Windows, Linux, macOS, x86, ARM) +- Version of the interpreter, compiler, SDK, runtime environment, package manager, depending on what seems relevant. +- Possibly your input and the output +- Can you reliably reproduce the issue? And can you also reproduce it with older versions? + + +#### How Do I Submit a Good Bug Report? 
+ +> You must never report security related issues, vulnerabilities or bugs including sensitive information to the issue tracker, or elsewhere in public. Instead sensitive bugs must be sent by email to . + + +We use GitHub issues to track bugs and errors. If you run into an issue with the project: + +- Open an [Issue](https://github.com/imdario/mergo/issues/new). (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.) +- Explain the behavior you would expect and the actual behavior. +- Please provide as much context as possible and describe the *reproduction steps* that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case. +- Provide the information you collected in the previous section. + +Once it's filed: + +- The project team will label the issue accordingly. +- A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. Bugs with the `needs-repro` tag will not be addressed until they are reproduced. +- If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be implemented by someone. + +### Suggesting Enhancements + +This section guides you through submitting an enhancement suggestion for mergo, **including completely new features and minor improvements to existing functionality**. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions. + + +#### Before Submitting an Enhancement + +- Make sure that you are using the latest version. +- Read the [documentation]() carefully and find out if the functionality is already covered, maybe by an individual configuration. +- Perform a [search](https://github.com/imdario/mergo/issues) to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one. +- Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. If you're just targeting a minority of users, consider writing an add-on/plugin library. + + +#### How Do I Submit a Good Enhancement Suggestion? + +Enhancement suggestions are tracked as [GitHub issues](https://github.com/imdario/mergo/issues). + +- Use a **clear and descriptive title** for the issue to identify the suggestion. +- Provide a **step-by-step description of the suggested enhancement** in as many details as possible. +- **Describe the current behavior** and **explain which behavior you expected to see instead** and why. At this point you can also tell which alternatives do not work for you. +- You may want to **include screenshots and animated GIFs** which help you demonstrate the steps or point out the part which the suggestion is related to. You can use [this tool](https://www.cockos.com/licecap/) to record GIFs on macOS and Windows, and [this tool](https://github.com/colinkeenan/silentcast) or [this tool](https://github.com/GNOME/byzanz) on Linux. 
+- **Explain why this enhancement would be useful** to most mergo users. You may also want to point out the other projects that solved it better and which could serve as inspiration. + + +## Attribution +This guide is based on the **contributing-gen**. [Make your own](https://github.com/bttger/contributing-gen)! diff --git a/vendor/github.com/imdario/mergo/README.md b/vendor/github.com/imdario/mergo/README.md index aa8cbd7ce..ffbbb62c7 100644 --- a/vendor/github.com/imdario/mergo/README.md +++ b/vendor/github.com/imdario/mergo/README.md @@ -1,18 +1,20 @@ # Mergo - -[![GoDoc][3]][4] [![GitHub release][5]][6] [![GoCard][7]][8] -[![Build Status][1]][2] -[![Coverage Status][9]][10] +[![Test status][1]][2] +[![OpenSSF Scorecard][21]][22] +[![OpenSSF Best Practices][19]][20] +[![Coverage status][9]][10] [![Sourcegraph][11]][12] -[![FOSSA Status][13]][14] +[![FOSSA status][13]][14] -[![GoCenter Kudos][15]][16] +[![GoDoc][3]][4] +[![Become my sponsor][15]][16] +[![Tidelift][17]][18] -[1]: https://travis-ci.org/imdario/mergo.png -[2]: https://travis-ci.org/imdario/mergo +[1]: https://github.com/imdario/mergo/workflows/tests/badge.svg?branch=master +[2]: https://github.com/imdario/mergo/actions/workflows/tests.yml [3]: https://godoc.org/github.com/imdario/mergo?status.svg [4]: https://godoc.org/github.com/imdario/mergo [5]: https://img.shields.io/github/release/imdario/mergo.svg @@ -25,8 +27,14 @@ [12]: https://sourcegraph.com/github.com/imdario/mergo?badge [13]: https://app.fossa.io/api/projects/git%2Bgithub.com%2Fimdario%2Fmergo.svg?type=shield [14]: https://app.fossa.io/projects/git%2Bgithub.com%2Fimdario%2Fmergo?ref=badge_shield -[15]: https://search.gocenter.io/api/ui/badge/github.com%2Fimdario%2Fmergo -[16]: https://search.gocenter.io/github.com/imdario/mergo +[15]: https://img.shields.io/github/sponsors/imdario +[16]: https://github.com/sponsors/imdario +[17]: https://tidelift.com/badges/package/go/github.com%2Fimdario%2Fmergo +[18]: https://tidelift.com/subscription/pkg/go-github.com-imdario-mergo +[19]: https://bestpractices.coreinfrastructure.org/projects/7177/badge +[20]: https://bestpractices.coreinfrastructure.org/projects/7177 +[21]: https://api.securityscorecards.dev/projects/github.com/imdario/mergo/badge +[22]: https://api.securityscorecards.dev/projects/github.com/imdario/mergo A helper to merge structs and maps in Golang. Useful for configuration default values, avoiding messy if-statements. @@ -36,11 +44,11 @@ Also a lovely [comune](http://en.wikipedia.org/wiki/Mergo) (municipality) in the ## Status -It is ready for production use. [It is used in several projects by Docker, Google, The Linux Foundation, VMWare, Shopify, etc](https://github.com/imdario/mergo#mergo-in-the-wild). +It is ready for production use. [It is used in several projects by Docker, Google, The Linux Foundation, VMWare, Shopify, Microsoft, etc](https://github.com/imdario/mergo#mergo-in-the-wild). ### Important note -Please keep in mind that a problematic PR broke [0.3.9](//github.com/imdario/mergo/releases/tag/0.3.9). I reverted it in [0.3.10](//github.com/imdario/mergo/releases/tag/0.3.10), and I consider it stable but not bug-free. Also, this version adds suppot for go modules. +Please keep in mind that a problematic PR broke [0.3.9](//github.com/imdario/mergo/releases/tag/0.3.9). I reverted it in [0.3.10](//github.com/imdario/mergo/releases/tag/0.3.10), and I consider it stable but not bug-free. Also, this version adds support for go modules. 
Keep in mind that in [0.3.2](//github.com/imdario/mergo/releases/tag/0.3.2), Mergo changed `Merge()`and `Map()` signatures to support [transformers](#transformers). I added an optional/variadic argument so that it won't break the existing code. @@ -51,9 +59,8 @@ If you were using Mergo before April 6th, 2015, please check your project works If Mergo is useful to you, consider buying me a coffee, a beer, or making a monthly donation to allow me to keep building great free software. :heart_eyes: Buy Me a Coffee at ko-fi.com -[![Beerpay](https://beerpay.io/imdario/mergo/badge.svg)](https://beerpay.io/imdario/mergo) -[![Beerpay](https://beerpay.io/imdario/mergo/make-wish.svg)](https://beerpay.io/imdario/mergo) Donate using Liberapay +Become my sponsor ### Mergo in the wild @@ -98,6 +105,8 @@ If Mergo is useful to you, consider buying me a coffee, a beer, or making a mont - [jnuthong/item_search](https://github.com/jnuthong/item_search) - [bukalapak/snowboard](https://github.com/bukalapak/snowboard) - [containerssh/containerssh](https://github.com/containerssh/containerssh) +- [goreleaser/goreleaser](https://github.com/goreleaser/goreleaser) +- [tjpnz/structbot](https://github.com/tjpnz/structbot) ## Install @@ -168,7 +177,7 @@ func main() { Note: if test are failing due missing package, please execute: - go get gopkg.in/yaml.v2 + go get gopkg.in/yaml.v3 ### Transformers @@ -218,7 +227,6 @@ func main() { } ``` - ## Contact me If I can help you, you have an idea or you are using Mergo in your projects, don't hesitate to drop me a line (or a pull request): [@im_dario](https://twitter.com/im_dario) @@ -227,21 +235,8 @@ If I can help you, you have an idea or you are using Mergo in your projects, don Written by [Dario Castaรฑรฉ](http://dario.im). -## Top Contributors - -[![0](https://sourcerer.io/fame/imdario/imdario/mergo/images/0)](https://sourcerer.io/fame/imdario/imdario/mergo/links/0) -[![1](https://sourcerer.io/fame/imdario/imdario/mergo/images/1)](https://sourcerer.io/fame/imdario/imdario/mergo/links/1) -[![2](https://sourcerer.io/fame/imdario/imdario/mergo/images/2)](https://sourcerer.io/fame/imdario/imdario/mergo/links/2) -[![3](https://sourcerer.io/fame/imdario/imdario/mergo/images/3)](https://sourcerer.io/fame/imdario/imdario/mergo/links/3) -[![4](https://sourcerer.io/fame/imdario/imdario/mergo/images/4)](https://sourcerer.io/fame/imdario/imdario/mergo/links/4) -[![5](https://sourcerer.io/fame/imdario/imdario/mergo/images/5)](https://sourcerer.io/fame/imdario/imdario/mergo/links/5) -[![6](https://sourcerer.io/fame/imdario/imdario/mergo/images/6)](https://sourcerer.io/fame/imdario/imdario/mergo/links/6) -[![7](https://sourcerer.io/fame/imdario/imdario/mergo/images/7)](https://sourcerer.io/fame/imdario/imdario/mergo/links/7) - - ## License [BSD 3-Clause](http://opensource.org/licenses/BSD-3-Clause) license, as [Go language](http://golang.org/LICENSE). 
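
> Note (not part of the upstream README): the mergo bump in this patch also introduces a `WithoutDereference` option (see the `merge.go` hunk further down). A minimal, hedged sketch of the variadic `Merge` options — the `AppConfig`/`Conn` types below are hypothetical and exist only for illustration:

```go
package main

import (
	"fmt"

	"github.com/imdario/mergo"
)

// Hypothetical types used only for this illustration.
type Conn struct {
	Host string
}

type AppConfig struct {
	Retries int
	Conn    *Conn
}

func main() {
	src := AppConfig{Retries: 3, Conn: &Conn{Host: "db"}}

	// Default behaviour dereferences pointers when deciding emptiness,
	// so the empty Host behind dst.Conn is filled in from src.
	dst := AppConfig{Conn: &Conn{}}
	_ = mergo.Merge(&dst, src)

	// WithoutDereference treats any non-nil pointer in dst as already set,
	// so dst2.Conn is left untouched.
	dst2 := AppConfig{Conn: &Conn{}}
	_ = mergo.Merge(&dst2, src, mergo.WithoutDereference)

	fmt.Println(dst.Conn.Host, dst2.Conn.Host) // "db" and "" respectively
}
```
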
- [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fimdario%2Fmergo.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fimdario%2Fmergo?ref=badge_large) diff --git a/vendor/github.com/imdario/mergo/SECURITY.md b/vendor/github.com/imdario/mergo/SECURITY.md new file mode 100644 index 000000000..a5de61f77 --- /dev/null +++ b/vendor/github.com/imdario/mergo/SECURITY.md @@ -0,0 +1,14 @@ +# Security Policy + +## Supported Versions + +| Version | Supported | +| ------- | ------------------ | +| 0.3.x | :white_check_mark: | +| < 0.3 | :x: | + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. diff --git a/vendor/github.com/imdario/mergo/map.go b/vendor/github.com/imdario/mergo/map.go index a13a7ee46..b50d5c2a4 100644 --- a/vendor/github.com/imdario/mergo/map.go +++ b/vendor/github.com/imdario/mergo/map.go @@ -44,7 +44,7 @@ func deepMap(dst, src reflect.Value, visited map[uintptr]*visit, depth int, conf } } // Remember, remember... - visited[h] = &visit{addr, typ, seen} + visited[h] = &visit{typ, seen, addr} } zeroValue := reflect.Value{} switch dst.Kind() { @@ -58,7 +58,7 @@ func deepMap(dst, src reflect.Value, visited map[uintptr]*visit, depth int, conf } fieldName := field.Name fieldName = changeInitialCase(fieldName, unicode.ToLower) - if v, ok := dstMap[fieldName]; !ok || (isEmptyValue(reflect.ValueOf(v)) || overwrite) { + if v, ok := dstMap[fieldName]; !ok || (isEmptyValue(reflect.ValueOf(v), !config.ShouldNotDereference) || overwrite) { dstMap[fieldName] = src.Field(i).Interface() } } @@ -142,7 +142,7 @@ func MapWithOverwrite(dst, src interface{}, opts ...func(*Config)) error { func _map(dst, src interface{}, opts ...func(*Config)) error { if dst != nil && reflect.ValueOf(dst).Kind() != reflect.Ptr { - return ErrNonPointerAgument + return ErrNonPointerArgument } var ( vDst, vSrc reflect.Value diff --git a/vendor/github.com/imdario/mergo/merge.go b/vendor/github.com/imdario/mergo/merge.go index 8c2a8fcd9..0ef9b2138 100644 --- a/vendor/github.com/imdario/mergo/merge.go +++ b/vendor/github.com/imdario/mergo/merge.go @@ -38,10 +38,11 @@ func isExportedComponent(field *reflect.StructField) bool { } type Config struct { + Transformers Transformers Overwrite bool + ShouldNotDereference bool AppendSlice bool TypeCheck bool - Transformers Transformers overwriteWithEmptyValue bool overwriteSliceWithEmptyValue bool sliceDeepCopy bool @@ -76,10 +77,10 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co } } // Remember, remember... 
- visited[h] = &visit{addr, typ, seen} + visited[h] = &visit{typ, seen, addr} } - if config.Transformers != nil && !isEmptyValue(dst) { + if config.Transformers != nil && !isReflectNil(dst) && dst.IsValid() { if fn := config.Transformers.Transformer(dst.Type()); fn != nil { err = fn(dst, src) return @@ -95,7 +96,7 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co } } } else { - if dst.CanSet() && (isReflectNil(dst) || overwrite) && (!isEmptyValue(src) || overwriteWithEmptySrc) { + if dst.CanSet() && (isReflectNil(dst) || overwrite) && (!isEmptyValue(src, !config.ShouldNotDereference) || overwriteWithEmptySrc) { dst.Set(src) } } @@ -110,7 +111,7 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co } if src.Kind() != reflect.Map { - if overwrite { + if overwrite && dst.CanSet() { dst.Set(src) } return @@ -162,7 +163,7 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co dstSlice = reflect.ValueOf(dstElement.Interface()) } - if (!isEmptyValue(src) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice && !sliceDeepCopy { + if (!isEmptyValue(src, !config.ShouldNotDereference) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst, !config.ShouldNotDereference)) && !config.AppendSlice && !sliceDeepCopy { if typeCheck && srcSlice.Type() != dstSlice.Type() { return fmt.Errorf("cannot override two slices with different type (%s, %s)", srcSlice.Type(), dstSlice.Type()) } @@ -194,22 +195,38 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co dst.SetMapIndex(key, dstSlice) } } - if dstElement.IsValid() && !isEmptyValue(dstElement) && (reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Map || reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Slice) { - continue + + if dstElement.IsValid() && !isEmptyValue(dstElement, !config.ShouldNotDereference) { + if reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Slice { + continue + } + if reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Map && reflect.TypeOf(dstElement.Interface()).Kind() == reflect.Map { + continue + } } - if srcElement.IsValid() && ((srcElement.Kind() != reflect.Ptr && overwrite) || !dstElement.IsValid() || isEmptyValue(dstElement)) { + if srcElement.IsValid() && ((srcElement.Kind() != reflect.Ptr && overwrite) || !dstElement.IsValid() || isEmptyValue(dstElement, !config.ShouldNotDereference)) { if dst.IsNil() { dst.Set(reflect.MakeMap(dst.Type())) } dst.SetMapIndex(key, srcElement) } } + + // Ensure that all keys in dst are deleted if they are not in src. 
+ if overwriteWithEmptySrc { + for _, key := range dst.MapKeys() { + srcElement := src.MapIndex(key) + if !srcElement.IsValid() { + dst.SetMapIndex(key, reflect.Value{}) + } + } + } case reflect.Slice: if !dst.CanSet() { break } - if (!isEmptyValue(src) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice && !sliceDeepCopy { + if (!isEmptyValue(src, !config.ShouldNotDereference) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst, !config.ShouldNotDereference)) && !config.AppendSlice && !sliceDeepCopy { dst.Set(src) } else if config.AppendSlice { if src.Type() != dst.Type() { @@ -244,12 +261,18 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co if src.Kind() != reflect.Interface { if dst.IsNil() || (src.Kind() != reflect.Ptr && overwrite) { - if dst.CanSet() && (overwrite || isEmptyValue(dst)) { + if dst.CanSet() && (overwrite || isEmptyValue(dst, !config.ShouldNotDereference)) { dst.Set(src) } } else if src.Kind() == reflect.Ptr { - if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil { - return + if !config.ShouldNotDereference { + if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil { + return + } + } else { + if overwriteWithEmptySrc || (overwrite && !src.IsNil()) || dst.IsNil() { + dst.Set(src) + } } } else if dst.Elem().Type() == src.Type() { if err = deepMerge(dst.Elem(), src, visited, depth+1, config); err != nil { @@ -262,7 +285,7 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co } if dst.IsNil() || overwrite { - if dst.CanSet() && (overwrite || isEmptyValue(dst)) { + if dst.CanSet() && (overwrite || isEmptyValue(dst, !config.ShouldNotDereference)) { dst.Set(src) } break @@ -275,7 +298,7 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co break } default: - mustSet := (isEmptyValue(dst) || overwrite) && (!isEmptyValue(src) || overwriteWithEmptySrc) + mustSet := (isEmptyValue(dst, !config.ShouldNotDereference) || overwrite) && (!isEmptyValue(src, !config.ShouldNotDereference) || overwriteWithEmptySrc) if mustSet { if dst.CanSet() { dst.Set(src) @@ -326,6 +349,12 @@ func WithOverrideEmptySlice(config *Config) { config.overwriteSliceWithEmptyValue = true } +// WithoutDereference prevents dereferencing pointers when evaluating whether they are empty +// (i.e. a non-nil pointer is never considered empty). +func WithoutDereference(config *Config) { + config.ShouldNotDereference = true +} + // WithAppendSlice will make merge append slices instead of overwriting it. 
func WithAppendSlice(config *Config) { config.AppendSlice = true @@ -344,7 +373,7 @@ func WithSliceDeepCopy(config *Config) { func merge(dst, src interface{}, opts ...func(*Config)) error { if dst != nil && reflect.ValueOf(dst).Kind() != reflect.Ptr { - return ErrNonPointerAgument + return ErrNonPointerArgument } var ( vDst, vSrc reflect.Value diff --git a/vendor/github.com/imdario/mergo/mergo.go b/vendor/github.com/imdario/mergo/mergo.go index 3cc926c7f..0a721e2d8 100644 --- a/vendor/github.com/imdario/mergo/mergo.go +++ b/vendor/github.com/imdario/mergo/mergo.go @@ -17,10 +17,10 @@ var ( ErrNilArguments = errors.New("src and dst must not be nil") ErrDifferentArgumentsTypes = errors.New("src and dst must be of same type") - ErrNotSupported = errors.New("only structs and maps are supported") + ErrNotSupported = errors.New("only structs, maps, and slices are supported") ErrExpectedMapAsDestination = errors.New("dst was expected to be a map") ErrExpectedStructAsDestination = errors.New("dst was expected to be a struct") - ErrNonPointerAgument = errors.New("dst must be a pointer") + ErrNonPointerArgument = errors.New("dst must be a pointer") ) // During deepMerge, must keep track of checks that are @@ -28,13 +28,13 @@ // checks in progress are true when it reencounters them. // Visited are stored in a map indexed by 17 * a1 + a2; type visit struct { - ptr uintptr typ reflect.Type next *visit + ptr uintptr } // From src/pkg/encoding/json/encode.go. -func isEmptyValue(v reflect.Value) bool { +func isEmptyValue(v reflect.Value, shouldDereference bool) bool { switch v.Kind() { case reflect.Array, reflect.Map, reflect.Slice, reflect.String: return v.Len() == 0 @@ -50,7 +50,10 @@ func isEmptyValue(v reflect.Value) bool { if v.IsNil() { return true } - return isEmptyValue(v.Elem()) + if shouldDereference { + return isEmptyValue(v.Elem(), shouldDereference) + } + return false case reflect.Func: return v.IsNil() case reflect.Invalid: @@ -65,7 +68,7 @@ func resolveValues(dst, src interface{}) (vDst, vSrc reflect.Value, err error) { return } vDst = reflect.ValueOf(dst).Elem() - if vDst.Kind() != reflect.Struct && vDst.Kind() != reflect.Map { + if vDst.Kind() != reflect.Struct && vDst.Kind() != reflect.Map && vDst.Kind() != reflect.Slice { err = ErrNotSupported return } diff --git a/vendor/github.com/shopspring/decimal/.gitignore b/vendor/github.com/shopspring/decimal/.gitignore index 8a43ce9d7..ff36b987f 100644 --- a/vendor/github.com/shopspring/decimal/.gitignore +++ b/vendor/github.com/shopspring/decimal/.gitignore @@ -4,3 +4,6 @@ # IntelliJ .idea/ *.iml + +# VS code +*.code-workspace diff --git a/vendor/github.com/shopspring/decimal/.travis.yml b/vendor/github.com/shopspring/decimal/.travis.yml index 55d42b289..6326d40f0 100644 --- a/vendor/github.com/shopspring/decimal/.travis.yml +++ b/vendor/github.com/shopspring/decimal/.travis.yml @@ -1,9 +1,15 @@ language: go +arch: + - amd64 + - ppc64le + go: - 1.7.x - - 1.12.x - - 1.13.x + - 1.14.x + - 1.15.x + - 1.16.x + - 1.17.x - tip install: diff --git a/vendor/github.com/shopspring/decimal/CHANGELOG.md b/vendor/github.com/shopspring/decimal/CHANGELOG.md index 01ba02feb..aea61154b 100644 --- a/vendor/github.com/shopspring/decimal/CHANGELOG.md +++ b/vendor/github.com/shopspring/decimal/CHANGELOG.md @@ -1,4 +1,34 @@ -## Decimal v1.2.0 +## Decimal v1.3.1 + +#### ENHANCEMENTS +- Reduce memory allocation in case of initialization from big.Int [#252](https://github.com/shopspring/decimal/pull/252) + +#### BUGFIXES +- Fix binary marshalling of decimal zero value 
[#253](https://github.com/shopspring/decimal/pull/253) + +## Decimal v1.3.0 + +#### FEATURES +- Add NewFromFormattedString initializer [#184](https://github.com/shopspring/decimal/pull/184) +- Add NewNullDecimal initializer [#234](https://github.com/shopspring/decimal/pull/234) +- Add implementation of natural exponent function (Taylor, Hull-Abraham) [#229](https://github.com/shopspring/decimal/pull/229) +- Add RoundUp, RoundDown, RoundCeil, RoundFloor methods [#196](https://github.com/shopspring/decimal/pull/196) [#202](https://github.com/shopspring/decimal/pull/202) [#220](https://github.com/shopspring/decimal/pull/220) +- Add XML support for NullDecimal [#192](https://github.com/shopspring/decimal/pull/192) +- Add IsInteger method [#179](https://github.com/shopspring/decimal/pull/179) +- Add Copy helper method [#123](https://github.com/shopspring/decimal/pull/123) +- Add InexactFloat64 helper method [#205](https://github.com/shopspring/decimal/pull/205) +- Add CoefficientInt64 helper method [#244](https://github.com/shopspring/decimal/pull/244) + +#### ENHANCEMENTS +- Performance optimization of NewFromString init method [#198](https://github.com/shopspring/decimal/pull/198) +- Performance optimization of Abs and Round methods [#240](https://github.com/shopspring/decimal/pull/240) +- Additional tests (CI) for ppc64le architecture [#188](https://github.com/shopspring/decimal/pull/188) + +#### BUGFIXES +- Fix rounding in FormatFloat fallback path (roundShortest method, fix taken from Go main repository) [#161](https://github.com/shopspring/decimal/pull/161) +- Add slice range checks to UnmarshalBinary method [#232](https://github.com/shopspring/decimal/pull/232) + +## Decimal v1.2.0 #### BREAKING - Drop support for Go version older than 1.7 [#172](https://github.com/shopspring/decimal/pull/172) diff --git a/vendor/github.com/shopspring/decimal/README.md b/vendor/github.com/shopspring/decimal/README.md index b70f90159..2e35df068 100644 --- a/vendor/github.com/shopspring/decimal/README.md +++ b/vendor/github.com/shopspring/decimal/README.md @@ -1,6 +1,6 @@ # decimal -[![Build Status](https://travis-ci.org/shopspring/decimal.png?branch=master)](https://travis-ci.org/shopspring/decimal) [![GoDoc](https://godoc.org/github.com/shopspring/decimal?status.svg)](https://godoc.org/github.com/shopspring/decimal) [![Go Report Card](https://goreportcard.com/badge/github.com/shopspring/decimal)](https://goreportcard.com/report/github.com/shopspring/decimal) +[![Build Status](https://app.travis-ci.com/shopspring/decimal.svg?branch=master)](https://app.travis-ci.com/shopspring/decimal) [![GoDoc](https://godoc.org/github.com/shopspring/decimal?status.svg)](https://godoc.org/github.com/shopspring/decimal) [![Go Report Card](https://goreportcard.com/badge/github.com/shopspring/decimal)](https://goreportcard.com/report/github.com/shopspring/decimal) Arbitrary-precision fixed-point decimal numbers in go. diff --git a/vendor/github.com/shopspring/decimal/decimal.go b/vendor/github.com/shopspring/decimal/decimal.go index 801c1a045..84405ec1c 100644 --- a/vendor/github.com/shopspring/decimal/decimal.go +++ b/vendor/github.com/shopspring/decimal/decimal.go @@ -22,6 +22,7 @@ "fmt" "math" "math/big" + "regexp" "strconv" "strings" ) @@ -51,6 +52,10 @@ // silently lose precision. var MarshalJSONWithoutQuotes = false +// ExpMaxIterations specifies the maximum number of iterations needed to calculate +// precise natural exponent value using ExpHullAbrham method. 
+var ExpMaxIterations = 1000 + // Zero constant, to make computations faster. // Zero should never be compared with == or != directly, please use decimal.Equal or decimal.Cmp instead. var Zero = New(0, 1) @@ -63,6 +68,8 @@ var tenInt = big.NewInt(10) var twentyInt = big.NewInt(20) +var factorials = []Decimal{New(1, 0)} + // Decimal represents a fixed-point decimal. It is immutable. // number = value * 10 ^ exp type Decimal struct { @@ -113,7 +120,7 @@ func NewFromInt32(value int32) Decimal { // NewFromBigInt returns a new Decimal from a big.Int, value * 10 ^ exp func NewFromBigInt(value *big.Int, exp int32) Decimal { return Decimal{ - value: big.NewInt(0).Set(value), + value: new(big.Int).Set(value), exp: exp, } } @@ -146,23 +153,45 @@ func NewFromString(value string) (Decimal, error) { exp = expInt } - parts := strings.Split(value, ".") - if len(parts) == 1 { + pIndex := -1 + vLen := len(value) + for i := 0; i < vLen; i++ { + if value[i] == '.' { + if pIndex > -1 { + return Decimal{}, fmt.Errorf("can't convert %s to decimal: too many .s", value) + } + pIndex = i + } + } + + if pIndex == -1 { // There is no decimal point, we can just parse the original string as // an int intString = value - } else if len(parts) == 2 { - intString = parts[0] + parts[1] - expInt := -len(parts[1]) - exp += int64(expInt) } else { - return Decimal{}, fmt.Errorf("can't convert %s to decimal: too many .s", value) + if pIndex+1 < vLen { + intString = value[:pIndex] + value[pIndex+1:] + } else { + intString = value[:pIndex] + } + expInt := -len(value[pIndex+1:]) + exp += int64(expInt) } - dValue := new(big.Int) - _, ok := dValue.SetString(intString, 10) - if !ok { - return Decimal{}, fmt.Errorf("can't convert %s to decimal", value) + var dValue *big.Int + // strconv.ParseInt is faster than new(big.Int).SetString so this is just a shortcut for strings we know won't overflow + if len(intString) <= 18 { + parsed64, err := strconv.ParseInt(intString, 10, 64) + if err != nil { + return Decimal{}, fmt.Errorf("can't convert %s to decimal", value) + } + dValue = big.NewInt(parsed64) + } else { + dValue = new(big.Int) + _, ok := dValue.SetString(intString, 10) + if !ok { + return Decimal{}, fmt.Errorf("can't convert %s to decimal", value) + } } if exp < math.MinInt32 || exp > math.MaxInt32 { @@ -176,6 +205,30 @@ func NewFromString(value string) (Decimal, error) { }, nil } +// NewFromFormattedString returns a new Decimal from a formatted string representation. +// The second argument - replRegexp, is a regular expression that is used to find characters that should be +// removed from given decimal string representation. All matched characters will be replaced with an empty string. +// +// Example: +// +// r := regexp.MustCompile("[$,]") +// d1, err := NewFromFormattedString("$5,125.99", r) +// +// r2 := regexp.MustCompile("[_]") +// d2, err := NewFromFormattedString("1_000_000", r2) +// +// r3 := regexp.MustCompile("[USD\\s]") +// d3, err := NewFromFormattedString("5000 USD", r3) +// +func NewFromFormattedString(value string, replRegexp *regexp.Regexp) (Decimal, error) { + parsedValue := replRegexp.ReplaceAllString(value, "") + d, err := NewFromString(parsedValue) + if err != nil { + return Decimal{}, err + } + return d, nil +} + // RequireFromString returns a new Decimal from a string representation // or panics if NewFromString would have returned an error. 
// @@ -361,6 +414,15 @@ func NewFromFloatWithExponent(value float64, exp int32) Decimal { } } +// Copy returns a copy of decimal with the same value and exponent, but a different pointer to value. +func (d Decimal) Copy() Decimal { + d.ensureInitialized() + return Decimal{ + value: &(*d.value), + exp: d.exp, + } +} + // rescale returns a rescaled version of the decimal. Returned // decimal may be less precise if the given exponent is bigger // than the initial exponent of the Decimal. @@ -410,6 +472,9 @@ func (d Decimal) rescale(exp int32) Decimal { // Abs returns the absolute value of the decimal. func (d Decimal) Abs() Decimal { + if !d.IsNegative() { + return d + } d.ensureInitialized() d2Value := new(big.Int).Abs(d.value) return Decimal{ @@ -583,6 +648,207 @@ func (d Decimal) Pow(d2 Decimal) Decimal { return temp.Mul(temp).Div(d) } +// ExpHullAbrham calculates the natural exponent of decimal (e to the power of d) using Hull-Abraham algorithm. +// OverallPrecision argument specifies the overall precision of the result (integer part + decimal part). +// +// ExpHullAbrham is faster than ExpTaylor for small precision values, but it is much slower for large precision values. +// +// Example: +// +// NewFromFloat(26.1).ExpHullAbrham(2).String() // output: "220000000000" +// NewFromFloat(26.1).ExpHullAbrham(20).String() // output: "216314672147.05767284" +// +func (d Decimal) ExpHullAbrham(overallPrecision uint32) (Decimal, error) { + // Algorithm based on Variable precision exponential function. + // ACM Transactions on Mathematical Software by T. E. Hull & A. Abrham. + if d.IsZero() { + return Decimal{oneInt, 0}, nil + } + + currentPrecision := overallPrecision + + // Algorithm does not work if currentPrecision * 23 < |x|. + // Precision is automatically increased in such cases, so the value can be calculated precisely. + // If newly calculated precision is higher than ExpMaxIterations the currentPrecision will not be changed. 
+ f := d.Abs().InexactFloat64() + if ncp := f / 23; ncp > float64(currentPrecision) && ncp < float64(ExpMaxIterations) { + currentPrecision = uint32(math.Ceil(ncp)) + } + + // fail if abs(d) beyond an over/underflow threshold + overflowThreshold := New(23*int64(currentPrecision), 0) + if d.Abs().Cmp(overflowThreshold) > 0 { + return Decimal{}, fmt.Errorf("over/underflow threshold, exp(x) cannot be calculated precisely") + } + + // Return 1 if abs(d) small enough; this also avoids later over/underflow + overflowThreshold2 := New(9, -int32(currentPrecision)-1) + if d.Abs().Cmp(overflowThreshold2) <= 0 { + return Decimal{oneInt, d.exp}, nil + } + + // t is the smallest integer >= 0 such that the corresponding abs(d/k) < 1 + t := d.exp + int32(d.NumDigits()) // Add d.NumDigits because the paper assumes that d.value [0.1, 1) + + if t < 0 { + t = 0 + } + + k := New(1, t) // reduction factor + r := Decimal{new(big.Int).Set(d.value), d.exp - t} // reduced argument + p := int32(currentPrecision) + t + 2 // precision for calculating the sum + + // Determine n, the number of therms for calculating sum + // use first Newton step (1.435p - 1.182) / log10(p/abs(r)) + // for solving appropriate equation, along with directed + // roundings and simple rational bound for log10(p/abs(r)) + rf := r.Abs().InexactFloat64() + pf := float64(p) + nf := math.Ceil((1.453*pf - 1.182) / math.Log10(pf/rf)) + if nf > float64(ExpMaxIterations) || math.IsNaN(nf) { + return Decimal{}, fmt.Errorf("exact value cannot be calculated in <=ExpMaxIterations iterations") + } + n := int64(nf) + + tmp := New(0, 0) + sum := New(1, 0) + one := New(1, 0) + for i := n - 1; i > 0; i-- { + tmp.value.SetInt64(i) + sum = sum.Mul(r.DivRound(tmp, p)) + sum = sum.Add(one) + } + + ki := k.IntPart() + res := New(1, 0) + for i := ki; i > 0; i-- { + res = res.Mul(sum) + } + + resNumDigits := int32(res.NumDigits()) + + var roundDigits int32 + if resNumDigits > abs(res.exp) { + roundDigits = int32(currentPrecision) - resNumDigits - res.exp + } else { + roundDigits = int32(currentPrecision) + } + + res = res.Round(roundDigits) + + return res, nil +} + +// ExpTaylor calculates the natural exponent of decimal (e to the power of d) using Taylor series expansion. +// Precision argument specifies how precise the result must be (number of digits after decimal point). +// Negative precision is allowed. +// +// ExpTaylor is much faster for large precision values than ExpHullAbrham. 
+// +// Example: +// +// d, err := NewFromFloat(26.1).ExpTaylor(2).String() +// d.String() // output: "216314672147.06" +// +// NewFromFloat(26.1).ExpTaylor(20).String() +// d.String() // output: "216314672147.05767284062928674083" +// +// NewFromFloat(26.1).ExpTaylor(-10).String() +// d.String() // output: "220000000000" +// +func (d Decimal) ExpTaylor(precision int32) (Decimal, error) { + // Note(mwoss): Implementation can be optimized by exclusively using big.Int API only + if d.IsZero() { + return Decimal{oneInt, 0}.Round(precision), nil + } + + var epsilon Decimal + var divPrecision int32 + if precision < 0 { + epsilon = New(1, -1) + divPrecision = 8 + } else { + epsilon = New(1, -precision-1) + divPrecision = precision + 1 + } + + decAbs := d.Abs() + pow := d.Abs() + factorial := New(1, 0) + + result := New(1, 0) + + for i := int64(1); ; { + step := pow.DivRound(factorial, divPrecision) + result = result.Add(step) + + // Stop Taylor series when current step is smaller than epsilon + if step.Cmp(epsilon) < 0 { + break + } + + pow = pow.Mul(decAbs) + + i++ + + // Calculate next factorial number or retrieve cached value + if len(factorials) >= int(i) && !factorials[i-1].IsZero() { + factorial = factorials[i-1] + } else { + // To avoid any race conditions, firstly the zero value is appended to a slice to create + // a spot for newly calculated factorial. After that, the zero value is replaced by calculated + // factorial using the index notation. + factorial = factorials[i-2].Mul(New(i, 0)) + factorials = append(factorials, Zero) + factorials[i-1] = factorial + } + } + + if d.Sign() < 0 { + result = New(1, 0).DivRound(result, precision+1) + } + + result = result.Round(precision) + return result, nil +} + +// NumDigits returns the number of digits of the decimal coefficient (d.Value) +// Note: Current implementation is extremely slow for large decimals and/or decimals with large fractional part +func (d Decimal) NumDigits() int { + // Note(mwoss): It can be optimized, unnecessary cast of big.Int to string + if d.IsNegative() { + return len(d.value.String()) - 1 + } + return len(d.value.String()) +} + +// IsInteger returns true when decimal can be represented as an integer value, otherwise, it returns false. +func (d Decimal) IsInteger() bool { + // The most typical case, all decimal with exponent higher or equal 0 can be represented as integer + if d.exp >= 0 { + return true + } + // When the exponent is negative we have to check every number after the decimal place + // If all of them are zeroes, we are sure that given decimal can be represented as an integer + var r big.Int + q := new(big.Int).Set(d.value) + for z := abs(d.exp); z > 0; z-- { + q.QuoRem(q, tenInt, &r) + if r.Cmp(zeroInt) != 0 { + return false + } + } + return true +} + +// Abs calculates absolute value of any int32. Used for calculating absolute value of decimal's exponent. +func abs(n int32) int32 { + if n < 0 { + return -n + } + return n +} + // Cmp compares the numbers represented by d and d2 and returns: // // -1 if d < d2 @@ -679,12 +945,18 @@ func (d Decimal) Exponent() int32 { return d.exp } -// Coefficient returns the coefficient of the decimal. It is scaled by 10^Exponent() +// Coefficient returns the coefficient of the decimal. It is scaled by 10^Exponent() func (d Decimal) Coefficient() *big.Int { d.ensureInitialized() - // we copy the coefficient so that mutating the result does not mutate the - // Decimal. 
- return big.NewInt(0).Set(d.value) + // we copy the coefficient so that mutating the result does not mutate the Decimal. + return new(big.Int).Set(d.value) +} + +// CoefficientInt64 returns the coefficient of the decimal as int64. It is scaled by 10^Exponent() +// If coefficient cannot be represented in an int64, the result will be undefined. +func (d Decimal) CoefficientInt64() int64 { + d.ensureInitialized() + return d.value.Int64() } // IntPart returns the integer component of the decimal. @@ -730,6 +1002,13 @@ func (d Decimal) Float64() (f float64, exact bool) { return d.Rat().Float64() } +// InexactFloat64 returns the nearest float64 value for d. +// It doesn't indicate if the returned value represents d exactly. +func (d Decimal) InexactFloat64() float64 { + f, _ := d.Float64() + return f +} + // String returns the string representation of the decimal // with the fixed point. // @@ -798,6 +1077,9 @@ func (d Decimal) StringFixedCash(interval uint8) string { // NewFromFloat(545).Round(-1).String() // output: "550" // func (d Decimal) Round(places int32) Decimal { + if d.exp == -places { + return d + } // truncate to places + 1 ret := d.rescale(-places - 1) @@ -818,6 +1100,107 @@ func (d Decimal) Round(places int32) Decimal { return ret } +// RoundCeil rounds the decimal towards +infinity. +// +// Example: +// +// NewFromFloat(545).RoundCeil(-2).String() // output: "600" +// NewFromFloat(500).RoundCeil(-2).String() // output: "500" +// NewFromFloat(1.1001).RoundCeil(2).String() // output: "1.11" +// NewFromFloat(-1.454).RoundCeil(1).String() // output: "-1.5" +// +func (d Decimal) RoundCeil(places int32) Decimal { + if d.exp >= -places { + return d + } + + rescaled := d.rescale(-places) + if d.Equal(rescaled) { + return d + } + + if d.value.Sign() > 0 { + rescaled.value.Add(rescaled.value, oneInt) + } + + return rescaled +} + +// RoundFloor rounds the decimal towards -infinity. +// +// Example: +// +// NewFromFloat(545).RoundFloor(-2).String() // output: "500" +// NewFromFloat(-500).RoundFloor(-2).String() // output: "-500" +// NewFromFloat(1.1001).RoundFloor(2).String() // output: "1.1" +// NewFromFloat(-1.454).RoundFloor(1).String() // output: "-1.4" +// +func (d Decimal) RoundFloor(places int32) Decimal { + if d.exp >= -places { + return d + } + + rescaled := d.rescale(-places) + if d.Equal(rescaled) { + return d + } + + if d.value.Sign() < 0 { + rescaled.value.Sub(rescaled.value, oneInt) + } + + return rescaled +} + +// RoundUp rounds the decimal away from zero. +// +// Example: +// +// NewFromFloat(545).RoundUp(-2).String() // output: "600" +// NewFromFloat(500).RoundUp(-2).String() // output: "500" +// NewFromFloat(1.1001).RoundUp(2).String() // output: "1.11" +// NewFromFloat(-1.454).RoundUp(1).String() // output: "-1.4" +// +func (d Decimal) RoundUp(places int32) Decimal { + if d.exp >= -places { + return d + } + + rescaled := d.rescale(-places) + if d.Equal(rescaled) { + return d + } + + if d.value.Sign() > 0 { + rescaled.value.Add(rescaled.value, oneInt) + } else if d.value.Sign() < 0 { + rescaled.value.Sub(rescaled.value, oneInt) + } + + return rescaled +} + +// RoundDown rounds the decimal towards zero. 
+// +// Example: +// +// NewFromFloat(545).RoundDown(-2).String() // output: "500" +// NewFromFloat(-500).RoundDown(-2).String() // output: "-500" +// NewFromFloat(1.1001).RoundDown(2).String() // output: "1.1" +// NewFromFloat(-1.454).RoundDown(1).String() // output: "-1.5" +// +func (d Decimal) RoundDown(places int32) Decimal { + if d.exp >= -places { + return d + } + + rescaled := d.rescale(-places) + if d.Equal(rescaled) { + return d + } + return rescaled +} + // RoundBank rounds the decimal to places decimal places. // If the final digit to round is equidistant from the nearest two integers the // rounded value is taken as the even number @@ -826,12 +1209,12 @@ func (d Decimal) Round(places int32) Decimal { // // Examples: // -// NewFromFloat(5.45).Round(1).String() // output: "5.4" -// NewFromFloat(545).Round(-1).String() // output: "540" -// NewFromFloat(5.46).Round(1).String() // output: "5.5" -// NewFromFloat(546).Round(-1).String() // output: "550" -// NewFromFloat(5.55).Round(1).String() // output: "5.6" -// NewFromFloat(555).Round(-1).String() // output: "560" +// NewFromFloat(5.45).RoundBank(1).String() // output: "5.4" +// NewFromFloat(545).RoundBank(-1).String() // output: "540" +// NewFromFloat(5.46).RoundBank(1).String() // output: "5.5" +// NewFromFloat(546).RoundBank(-1).String() // output: "550" +// NewFromFloat(5.55).RoundBank(1).String() // output: "5.6" +// NewFromFloat(555).RoundBank(-1).String() // output: "560" // func (d Decimal) RoundBank(places int32) Decimal { @@ -970,12 +1353,22 @@ func (d Decimal) MarshalJSON() ([]byte, error) { // UnmarshalBinary implements the encoding.BinaryUnmarshaler interface. As a string representation // is already used when encoding to text, this method stores that string as []byte func (d *Decimal) UnmarshalBinary(data []byte) error { + // Verify we have at least 4 bytes for the exponent. The GOB encoded value + // may be empty. + if len(data) < 4 { + return fmt.Errorf("error decoding binary %v: expected at least 4 bytes, got %d", data, len(data)) + } + // Extract the exponent d.exp = int32(binary.BigEndian.Uint32(data[:4])) // Extract the value d.value = new(big.Int) - return d.value.GobDecode(data[4:]) + if err := d.value.GobDecode(data[4:]); err != nil { + return fmt.Errorf("error decoding binary %v: %s", data, err) + } + + return nil } // MarshalBinary implements the encoding.BinaryMarshaler interface. @@ -1219,6 +1612,13 @@ type NullDecimal struct { Valid bool } +func NewNullDecimal(d Decimal) NullDecimal { + return NullDecimal{ + Decimal: d, + Valid: true, + } +} + // Scan implements the sql.Scanner interface for database deserialization. func (d *NullDecimal) Scan(value interface{}) error { if value == nil { @@ -1255,6 +1655,33 @@ func (d NullDecimal) MarshalJSON() ([]byte, error) { return d.Decimal.MarshalJSON() } +// UnmarshalText implements the encoding.TextUnmarshaler interface for XML +// deserialization +func (d *NullDecimal) UnmarshalText(text []byte) error { + str := string(text) + + // check for empty XML or XML without body e.g., + if str == "" { + d.Valid = false + return nil + } + if err := d.Decimal.UnmarshalText(text); err != nil { + d.Valid = false + return err + } + d.Valid = true + return nil +} + +// MarshalText implements the encoding.TextMarshaler interface for XML +// serialization. +func (d NullDecimal) MarshalText() (text []byte, err error) { + if !d.Valid { + return []byte{}, nil + } + return d.Decimal.MarshalText() +} + // Trig functions // Atan returns the arctangent, in radians, of x. 
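
> Note (editorial, not part of the vendored file): the `decimal.go` changes above add `NewFromFormattedString`, `InexactFloat64`, the directed rounding methods, and `IsInteger`. A minimal sketch of how these fit together; the input values are illustrative only:

```go
package main

import (
	"fmt"
	"regexp"

	"github.com/shopspring/decimal"
)

func main() {
	// NewFromFormattedString strips characters matched by the regexp before parsing.
	r := regexp.MustCompile(`[$,]`)
	price, err := decimal.NewFromFormattedString("$5,125.99", r)
	if err != nil {
		panic(err)
	}
	fmt.Println(price.InexactFloat64()) // 5125.99 as a plain float64

	// Directed rounding: Ceil/Floor round towards +/- infinity,
	// Up/Down round away from / towards zero.
	d := decimal.NewFromFloat(1.1001)
	fmt.Println(d.RoundCeil(2))  // 1.11
	fmt.Println(d.RoundFloor(2)) // 1.1
	fmt.Println(d.RoundUp(2))    // 1.11
	fmt.Println(d.RoundDown(2))  // 1.1

	// IsInteger reports whether the value has no fractional part.
	n, _ := decimal.NewFromString("7.00")
	fmt.Println(n.IsInteger()) // true
}
```
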
diff --git a/vendor/github.com/shopspring/decimal/rounding.go b/vendor/github.com/shopspring/decimal/rounding.go index 8008f55cb..d4b0cd007 100644 --- a/vendor/github.com/shopspring/decimal/rounding.go +++ b/vendor/github.com/shopspring/decimal/rounding.go @@ -80,39 +80,80 @@ func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) { // would round to the original mantissa and not the neighbors. inclusive := mant%2 == 0 + // As we walk the digits we want to know whether rounding up would fall + // within the upper bound. This is tracked by upperdelta: + // + // If upperdelta == 0, the digits of d and upper are the same so far. + // + // If upperdelta == 1, we saw a difference of 1 between d and upper on a + // previous digit and subsequently only 9s for d and 0s for upper. + // (Thus rounding up may fall outside the bound, if it is exclusive.) + // + // If upperdelta == 2, then the difference is greater than 1 + // and we know that rounding up falls within the bound. + var upperdelta uint8 + // Now we can figure out the minimum number of digits required. // Walk along until d has distinguished itself from upper and lower. - for i := 0; i < d.nd; i++ { - l := byte('0') // lower digit - if i < lower.nd { - l = lower.d[i] + for ui := 0; ; ui++ { + // lower, d, and upper may have the decimal points at different + // places. In this case upper is the longest, so we iterate from + // ui==0 and start li and mi at (possibly) -1. + mi := ui - upper.dp + d.dp + if mi >= d.nd { + break + } + li := ui - upper.dp + lower.dp + l := byte('0') // lower digit + if li >= 0 && li < lower.nd { + l = lower.d[li] + } + m := byte('0') // middle digit + if mi >= 0 { + m = d.d[mi] } - m := d.d[i] // middle digit u := byte('0') // upper digit - if i < upper.nd { - u = upper.d[i] + if ui < upper.nd { + u = upper.d[ui] } // Okay to round down (truncate) if lower has a different digit // or if lower is inclusive and is exactly the result of rounding // down (i.e., and we have reached the final digit of lower). - okdown := l != m || inclusive && i+1 == lower.nd + okdown := l != m || inclusive && li+1 == lower.nd + switch { + case upperdelta == 0 && m+1 < u: + // Example: + // m = 12345xxx + // u = 12347xxx + upperdelta = 2 + case upperdelta == 0 && m != u: + // Example: + // m = 12345xxx + // u = 12346xxx + upperdelta = 1 + case upperdelta == 1 && (m != '9' || u != '0'): + // Example: + // m = 1234598x + // u = 1234600x + upperdelta = 2 + } // Okay to round up if upper has a different digit and either upper // is inclusive or upper is bigger than the result of rounding up. - okup := m != u && (inclusive || m+1 < u || i+1 < upper.nd) + okup := upperdelta > 0 && (inclusive || upperdelta > 1 || ui+1 < upper.nd) // If it's okay to do either, then round to the nearest one. // If it's okay to do only one, do it. switch { case okdown && okup: - d.Round(i + 1) + d.Round(mi + 1) return case okdown: - d.RoundDown(i + 1) + d.RoundDown(mi + 1) return case okup: - d.RoundUp(i + 1) + d.RoundUp(mi + 1) return } } diff --git a/vendor/golang.org/x/exp/slices/slices.go b/vendor/golang.org/x/exp/slices/slices.go index 5e8158bba..46ceac343 100644 --- a/vendor/golang.org/x/exp/slices/slices.go +++ b/vendor/golang.org/x/exp/slices/slices.go @@ -209,25 +209,37 @@ func Insert[S ~[]E, E any](s S, i int, v ...E) S { return s } -// Delete removes the elements s[i:j] from s, returning the modified slice. -// Delete panics if s[i:j] is not a valid slice of s. 
-// Delete is O(len(s)-j), so if many items must be deleted, it is better to -// make a single call deleting them all together than to delete one at a time. -// Delete might not modify the elements s[len(s)-(j-i):len(s)]. If those -// elements contain pointers you might consider zeroing those elements so that -// objects they reference can be garbage collected. -func Delete[S ~[]E, E any](s S, i, j int) S { - _ = s[i:j] // bounds check +// clearSlice sets all elements up to the length of s to the zero value of E. +// We may use the builtin clear func instead, and remove clearSlice, when upgrading +// to Go 1.21+. +func clearSlice[S ~[]E, E any](s S) { + var zero E + for i := range s { + s[i] = zero + } +} - return append(s[:i], s[j:]...) +// Delete removes the elements s[i:j] from s, returning the modified slice. +// Delete panics if j > len(s) or s[i:j] is not a valid slice of s. +// Delete is O(len(s)-i), so if many items must be deleted, it is better to +// make a single call deleting them all together than to delete one at a time. +// Delete zeroes the elements s[len(s)-(j-i):len(s)]. +func Delete[S ~[]E, E any](s S, i, j int) S { + _ = s[i:j:len(s)] // bounds check + + if i == j { + return s + } + + oldlen := len(s) + s = append(s[:i], s[j:]...) + clearSlice(s[len(s):oldlen]) // zero/nil out the obsolete elements, for GC + return s } // DeleteFunc removes any elements from s for which del returns true, // returning the modified slice. -// When DeleteFunc removes m elements, it might not modify the elements -// s[len(s)-m:len(s)]. If those elements contain pointers you might consider -// zeroing those elements so that objects they reference can be garbage -// collected. +// DeleteFunc zeroes the elements between the new length and the original length. func DeleteFunc[S ~[]E, E any](s S, del func(E) bool) S { i := IndexFunc(s, del) if i == -1 { @@ -240,11 +252,13 @@ func DeleteFunc[S ~[]E, E any](s S, del func(E) bool) S { i++ } } + clearSlice(s[i:]) // zero/nil out the obsolete elements, for GC return s[:i] } // Replace replaces the elements s[i:j] by the given v, and returns the // modified slice. Replace panics if s[i:j] is not a valid slice of s. +// When len(v) < (j-i), Replace zeroes the elements between the new length and the original length. func Replace[S ~[]E, E any](s S, i, j int, v ...E) S { _ = s[i:j] // verify that i:j is a valid subslice @@ -272,6 +286,7 @@ func Replace[S ~[]E, E any](s S, i, j int, v ...E) S { if i+len(v) != j { copy(r[i+len(v):], s[j:]) } + clearSlice(s[tot:]) // zero/nil out the obsolete elements, for GC return r } @@ -345,9 +360,7 @@ func Clone[S ~[]E, E any](s S) S { // This is like the uniq command found on Unix. // Compact modifies the contents of the slice s and returns the modified slice, // which may have a smaller length. -// When Compact discards m elements in total, it might not modify the elements -// s[len(s)-m:len(s)]. If those elements contain pointers you might consider -// zeroing those elements so that objects they reference can be garbage collected. +// Compact zeroes the elements between the new length and the original length. func Compact[S ~[]E, E comparable](s S) S { if len(s) < 2 { return s @@ -361,11 +374,13 @@ func Compact[S ~[]E, E comparable](s S) S { i++ } } + clearSlice(s[i:]) // zero/nil out the obsolete elements, for GC return s[:i] } // CompactFunc is like [Compact] but uses an equality function to compare elements. // For runs of elements that compare equal, CompactFunc keeps the first one. 
+// CompactFunc zeroes the elements between the new length and the original length. func CompactFunc[S ~[]E, E any](s S, eq func(E, E) bool) S { if len(s) < 2 { return s @@ -379,6 +394,7 @@ func CompactFunc[S ~[]E, E any](s S, eq func(E, E) bool) S { i++ } } + clearSlice(s[i:]) // zero/nil out the obsolete elements, for GC return s[:i] } diff --git a/vendor/golang.org/x/sync/errgroup/errgroup.go b/vendor/golang.org/x/sync/errgroup/errgroup.go new file mode 100644 index 000000000..948a3ee63 --- /dev/null +++ b/vendor/golang.org/x/sync/errgroup/errgroup.go @@ -0,0 +1,135 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package errgroup provides synchronization, error propagation, and Context +// cancelation for groups of goroutines working on subtasks of a common task. +// +// [errgroup.Group] is related to [sync.WaitGroup] but adds handling of tasks +// returning errors. +package errgroup + +import ( + "context" + "fmt" + "sync" +) + +type token struct{} + +// A Group is a collection of goroutines working on subtasks that are part of +// the same overall task. +// +// A zero Group is valid, has no limit on the number of active goroutines, +// and does not cancel on error. +type Group struct { + cancel func(error) + + wg sync.WaitGroup + + sem chan token + + errOnce sync.Once + err error +} + +func (g *Group) done() { + if g.sem != nil { + <-g.sem + } + g.wg.Done() +} + +// WithContext returns a new Group and an associated Context derived from ctx. +// +// The derived Context is canceled the first time a function passed to Go +// returns a non-nil error or the first time Wait returns, whichever occurs +// first. +func WithContext(ctx context.Context) (*Group, context.Context) { + ctx, cancel := withCancelCause(ctx) + return &Group{cancel: cancel}, ctx +} + +// Wait blocks until all function calls from the Go method have returned, then +// returns the first non-nil error (if any) from them. +func (g *Group) Wait() error { + g.wg.Wait() + if g.cancel != nil { + g.cancel(g.err) + } + return g.err +} + +// Go calls the given function in a new goroutine. +// It blocks until the new goroutine can be added without the number of +// active goroutines in the group exceeding the configured limit. +// +// The first call to return a non-nil error cancels the group's context, if the +// group was created by calling WithContext. The error will be returned by Wait. +func (g *Group) Go(f func() error) { + if g.sem != nil { + g.sem <- token{} + } + + g.wg.Add(1) + go func() { + defer g.done() + + if err := f(); err != nil { + g.errOnce.Do(func() { + g.err = err + if g.cancel != nil { + g.cancel(g.err) + } + }) + } + }() +} + +// TryGo calls the given function in a new goroutine only if the number of +// active goroutines in the group is currently below the configured limit. +// +// The return value reports whether the goroutine was started. +func (g *Group) TryGo(f func() error) bool { + if g.sem != nil { + select { + case g.sem <- token{}: + // Note: this allows barging iff channels in general allow barging. + default: + return false + } + } + + g.wg.Add(1) + go func() { + defer g.done() + + if err := f(); err != nil { + g.errOnce.Do(func() { + g.err = err + if g.cancel != nil { + g.cancel(g.err) + } + }) + } + }() + return true +} + +// SetLimit limits the number of active goroutines in this group to at most n. +// A negative value indicates no limit. 
+// +// Any subsequent call to the Go method will block until it can add an active +// goroutine without exceeding the configured limit. +// +// The limit must not be modified while any goroutines in the group are active. +func (g *Group) SetLimit(n int) { + if n < 0 { + g.sem = nil + return + } + if len(g.sem) != 0 { + panic(fmt.Errorf("errgroup: modify limit while %v goroutines in the group are still active", len(g.sem))) + } + g.sem = make(chan token, n) +} diff --git a/vendor/golang.org/x/sync/errgroup/go120.go b/vendor/golang.org/x/sync/errgroup/go120.go new file mode 100644 index 000000000..f93c740b6 --- /dev/null +++ b/vendor/golang.org/x/sync/errgroup/go120.go @@ -0,0 +1,13 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.20 + +package errgroup + +import "context" + +func withCancelCause(parent context.Context) (context.Context, func(error)) { + return context.WithCancelCause(parent) +} diff --git a/vendor/golang.org/x/sync/errgroup/pre_go120.go b/vendor/golang.org/x/sync/errgroup/pre_go120.go new file mode 100644 index 000000000..88ce33434 --- /dev/null +++ b/vendor/golang.org/x/sync/errgroup/pre_go120.go @@ -0,0 +1,14 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !go1.20 + +package errgroup + +import "context" + +func withCancelCause(parent context.Context) (context.Context, func(error)) { + ctx, cancel := context.WithCancel(parent) + return ctx, func(error) { cancel() } +} diff --git a/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/vendor/golang.org/x/tools/go/buildutil/allpackages.go deleted file mode 100644 index dfb8cd6c7..000000000 --- a/vendor/golang.org/x/tools/go/buildutil/allpackages.go +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package buildutil provides utilities related to the go/build -// package in the standard library. -// -// All I/O is done via the build.Context file system interface, which must -// be concurrency-safe. -package buildutil // import "golang.org/x/tools/go/buildutil" - -import ( - "go/build" - "os" - "path/filepath" - "sort" - "strings" - "sync" -) - -// AllPackages returns the package path of each Go package in any source -// directory of the specified build context (e.g. $GOROOT or an element -// of $GOPATH). Errors are ignored. The results are sorted. -// All package paths are canonical, and thus may contain "/vendor/". -// -// The result may include import paths for directories that contain no -// *.go files, such as "archive" (in $GOROOT/src). -// -// All I/O is done via the build.Context file system interface, -// which must be concurrency-safe. -func AllPackages(ctxt *build.Context) []string { - var list []string - ForEachPackage(ctxt, func(pkg string, _ error) { - list = append(list, pkg) - }) - sort.Strings(list) - return list -} - -// ForEachPackage calls the found function with the package path of -// each Go package it finds in any source directory of the specified -// build context (e.g. $GOROOT or an element of $GOPATH). -// All package paths are canonical, and thus may contain "/vendor/". 
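
> Note (editorial, not part of the vendored files): the `errgroup` package vendored above groups goroutines, propagates the first error, and cancels a shared context. A minimal usage sketch under those assumptions; the URLs and the "work" done per goroutine are placeholders:

```go
package main

import (
	"context"
	"fmt"

	"golang.org/x/sync/errgroup"
)

func main() {
	g, ctx := errgroup.WithContext(context.Background())
	g.SetLimit(2) // at most two tasks in flight at once; must be set before any Go call

	urls := []string{"https://example.com/a", "https://example.com/b", "https://example.com/c"}
	for _, u := range urls {
		u := u // capture loop variable (pre-Go 1.22 semantics)
		g.Go(func() error {
			select {
			case <-ctx.Done():
				// Another task already failed; stop early.
				return ctx.Err()
			default:
				fmt.Println("processed", u) // placeholder for real work
				return nil
			}
		})
	}

	// Wait blocks for all tasks, cancels ctx, and returns the first non-nil error.
	if err := g.Wait(); err != nil {
		fmt.Println("error:", err)
	}
}
```
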
-// -// If the package directory exists but could not be read, the second -// argument to the found function provides the error. -// -// All I/O is done via the build.Context file system interface, -// which must be concurrency-safe. -func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) { - ch := make(chan item) - - var wg sync.WaitGroup - for _, root := range ctxt.SrcDirs() { - root := root - wg.Add(1) - go func() { - allPackages(ctxt, root, ch) - wg.Done() - }() - } - go func() { - wg.Wait() - close(ch) - }() - - // All calls to found occur in the caller's goroutine. - for i := range ch { - found(i.importPath, i.err) - } -} - -type item struct { - importPath string - err error // (optional) -} - -// We use a process-wide counting semaphore to limit -// the number of parallel calls to ReadDir. -var ioLimit = make(chan bool, 20) - -func allPackages(ctxt *build.Context, root string, ch chan<- item) { - root = filepath.Clean(root) + string(os.PathSeparator) - - var wg sync.WaitGroup - - var walkDir func(dir string) - walkDir = func(dir string) { - // Avoid .foo, _foo, and testdata directory trees. - base := filepath.Base(dir) - if base == "" || base[0] == '.' || base[0] == '_' || base == "testdata" { - return - } - - pkg := filepath.ToSlash(strings.TrimPrefix(dir, root)) - - // Prune search if we encounter any of these import paths. - switch pkg { - case "builtin": - return - } - - ioLimit <- true - files, err := ReadDir(ctxt, dir) - <-ioLimit - if pkg != "" || err != nil { - ch <- item{pkg, err} - } - for _, fi := range files { - fi := fi - if fi.IsDir() { - wg.Add(1) - go func() { - walkDir(filepath.Join(dir, fi.Name())) - wg.Done() - }() - } - } - } - - walkDir(root) - wg.Wait() -} - -// ExpandPatterns returns the set of packages matched by patterns, -// which may have the following forms: -// -// golang.org/x/tools/cmd/guru # a single package -// golang.org/x/tools/... # all packages beneath dir -// ... # the entire workspace. -// -// Order is significant: a pattern preceded by '-' removes matching -// packages from the set. For example, these patterns match all encoding -// packages except encoding/xml: -// -// encoding/... -encoding/xml -// -// A trailing slash in a pattern is ignored. (Path components of Go -// package names are separated by slash, not the platform's path separator.) -func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool { - // TODO(adonovan): support other features of 'go list': - // - "std"/"cmd"/"all" meta-packages - // - "..." not at the end of a pattern - // - relative patterns using "./" or "../" prefix - - pkgs := make(map[string]bool) - doPkg := func(pkg string, neg bool) { - if neg { - delete(pkgs, pkg) - } else { - pkgs[pkg] = true - } - } - - // Scan entire workspace if wildcards are present. - // TODO(adonovan): opt: scan only the necessary subtrees of the workspace. - var all []string - for _, arg := range patterns { - if strings.HasSuffix(arg, "...") { - all = AllPackages(ctxt) - break - } - } - - for _, arg := range patterns { - if arg == "" { - continue - } - - neg := arg[0] == '-' - if neg { - arg = arg[1:] - } - - if arg == "..." { - // ... matches all packages - for _, pkg := range all { - doPkg(pkg, neg) - } - } else if dir := strings.TrimSuffix(arg, "/..."); dir != arg { - // dir/... 
matches all packages beneath dir - for _, pkg := range all { - if strings.HasPrefix(pkg, dir) && - (len(pkg) == len(dir) || pkg[len(dir)] == '/') { - doPkg(pkg, neg) - } - } - } else { - // single package - doPkg(strings.TrimSuffix(arg, "/"), neg) - } - } - - return pkgs -} diff --git a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go deleted file mode 100644 index 763d18809..000000000 --- a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package buildutil - -import ( - "fmt" - "go/build" - "io" - "os" - "path" - "path/filepath" - "sort" - "strings" - "time" -) - -// FakeContext returns a build.Context for the fake file tree specified -// by pkgs, which maps package import paths to a mapping from file base -// names to contents. -// -// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides -// the necessary file access methods to read from memory instead of the -// real file system. -// -// Unlike a real file tree, the fake one has only two levels---packages -// and files---so ReadDir("/go/src/") returns all packages under -// /go/src/ including, for instance, "math" and "math/big". -// ReadDir("/go/src/math/big") would return all the files in the -// "math/big" package. -func FakeContext(pkgs map[string]map[string]string) *build.Context { - clean := func(filename string) string { - f := path.Clean(filepath.ToSlash(filename)) - // Removing "/go/src" while respecting segment - // boundaries has this unfortunate corner case: - if f == "/go/src" { - return "" - } - return strings.TrimPrefix(f, "/go/src/") - } - - ctxt := build.Default // copy - ctxt.GOROOT = "/go" - ctxt.GOPATH = "" - ctxt.Compiler = "gc" - ctxt.IsDir = func(dir string) bool { - dir = clean(dir) - if dir == "" { - return true // needed by (*build.Context).SrcDirs - } - return pkgs[dir] != nil - } - ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) { - dir = clean(dir) - var fis []os.FileInfo - if dir == "" { - // enumerate packages - for importPath := range pkgs { - fis = append(fis, fakeDirInfo(importPath)) - } - } else { - // enumerate files of package - for basename := range pkgs[dir] { - fis = append(fis, fakeFileInfo(basename)) - } - } - sort.Sort(byName(fis)) - return fis, nil - } - ctxt.OpenFile = func(filename string) (io.ReadCloser, error) { - filename = clean(filename) - dir, base := path.Split(filename) - content, ok := pkgs[path.Clean(dir)][base] - if !ok { - return nil, fmt.Errorf("file not found: %s", filename) - } - return io.NopCloser(strings.NewReader(content)), nil - } - ctxt.IsAbsPath = func(path string) bool { - path = filepath.ToSlash(path) - // Don't rely on the default (filepath.Path) since on - // Windows, it reports virtual paths as non-absolute. 
- return strings.HasPrefix(path, "/") - } - return &ctxt -} - -type byName []os.FileInfo - -func (s byName) Len() int { return len(s) } -func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] } -func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() } - -type fakeFileInfo string - -func (fi fakeFileInfo) Name() string { return string(fi) } -func (fakeFileInfo) Sys() interface{} { return nil } -func (fakeFileInfo) ModTime() time.Time { return time.Time{} } -func (fakeFileInfo) IsDir() bool { return false } -func (fakeFileInfo) Size() int64 { return 0 } -func (fakeFileInfo) Mode() os.FileMode { return 0644 } - -type fakeDirInfo string - -func (fd fakeDirInfo) Name() string { return string(fd) } -func (fakeDirInfo) Sys() interface{} { return nil } -func (fakeDirInfo) ModTime() time.Time { return time.Time{} } -func (fakeDirInfo) IsDir() bool { return true } -func (fakeDirInfo) Size() int64 { return 0 } -func (fakeDirInfo) Mode() os.FileMode { return 0755 } diff --git a/vendor/golang.org/x/tools/go/buildutil/overlay.go b/vendor/golang.org/x/tools/go/buildutil/overlay.go deleted file mode 100644 index 7e371658d..000000000 --- a/vendor/golang.org/x/tools/go/buildutil/overlay.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package buildutil - -import ( - "bufio" - "bytes" - "fmt" - "go/build" - "io" - "path/filepath" - "strconv" - "strings" -) - -// OverlayContext overlays a build.Context with additional files from -// a map. Files in the map take precedence over other files. -// -// In addition to plain string comparison, two file names are -// considered equal if their base names match and their directory -// components point at the same directory on the file system. That is, -// symbolic links are followed for directories, but not files. -// -// A common use case for OverlayContext is to allow editors to pass in -// a set of unsaved, modified files. -// -// Currently, only the Context.OpenFile function will respect the -// overlay. This may change in the future. -func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Context { - // TODO(dominikh): Implement IsDir, HasSubdir and ReadDir - - rc := func(data []byte) (io.ReadCloser, error) { - return io.NopCloser(bytes.NewBuffer(data)), nil - } - - copy := *orig // make a copy - ctxt := &copy - ctxt.OpenFile = func(path string) (io.ReadCloser, error) { - // Fast path: names match exactly. - if content, ok := overlay[path]; ok { - return rc(content) - } - - // Slow path: check for same file under a different - // alias, perhaps due to a symbolic link. - for filename, content := range overlay { - if sameFile(path, filename) { - return rc(content) - } - } - - return OpenFile(orig, path) - } - return ctxt -} - -// ParseOverlayArchive parses an archive containing Go files and their -// contents. The result is intended to be used with OverlayContext. -// -// # Archive format -// -// The archive consists of a series of files. Each file consists of a -// name, a decimal file size and the file contents, separated by -// newlines. No newline follows after the file contents. -func ParseOverlayArchive(archive io.Reader) (map[string][]byte, error) { - overlay := make(map[string][]byte) - r := bufio.NewReader(archive) - for { - // Read file name.
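
The FakeContext helper removed above backs many of the x/tools tests; a minimal sketch of its two-level in-memory tree follows. The "example/hello" package and its contents are hypothetical:

package main

import (
    "fmt"

    "golang.org/x/tools/go/buildutil"
)

func main() {
    // Package import path -> file base name -> file contents.
    ctxt := buildutil.FakeContext(map[string]map[string]string{
        "example/hello": {
            "hello.go": "package hello\n\nfunc Hi() string { return \"hi\" }\n",
        },
    })

    // The fake context reads from memory, never from disk.
    bp, err := ctxt.Import("example/hello", "", 0)
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println(bp.Name, bp.GoFiles) // hello [hello.go]
}
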
- filename, err := r.ReadString('\n') - if err != nil { - if err == io.EOF { - break // OK - } - return nil, fmt.Errorf("reading archive file name: %v", err) - } - filename = filepath.Clean(strings.TrimSpace(filename)) - - // Read file size. - sz, err := r.ReadString('\n') - if err != nil { - return nil, fmt.Errorf("reading size of archive file %s: %v", filename, err) - } - sz = strings.TrimSpace(sz) - size, err := strconv.ParseUint(sz, 10, 32) - if err != nil { - return nil, fmt.Errorf("parsing size of archive file %s: %v", filename, err) - } - - // Read file content. - content := make([]byte, size) - if _, err := io.ReadFull(r, content); err != nil { - return nil, fmt.Errorf("reading archive file %s: %v", filename, err) - } - overlay[filename] = content - } - - return overlay, nil -} diff --git a/vendor/golang.org/x/tools/go/buildutil/tags.go b/vendor/golang.org/x/tools/go/buildutil/tags.go deleted file mode 100644 index 32c8d1424..000000000 --- a/vendor/golang.org/x/tools/go/buildutil/tags.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package buildutil - -// This duplicated logic must be kept in sync with that from go build: -// $GOROOT/src/cmd/go/internal/work/build.go (tagsFlag.Set) -// $GOROOT/src/cmd/go/internal/base/flag.go (StringsFlag.Set) -// $GOROOT/src/cmd/internal/quoted/quoted.go (isSpaceByte, Split) - -import ( - "fmt" - "strings" -) - -const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. " + - "For more information about build tags, see the description of " + - "build constraints in the documentation for the go/build package" - -// TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses -// a flag value the same as go build's -tags flag and populates a []string slice. -// -// See $GOROOT/src/go/build/doc.go for description of build tags. -// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag. -// -// Example: -// -// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc) -type TagsFlag []string - -func (v *TagsFlag) Set(s string) error { - // See $GOROOT/src/cmd/go/internal/work/build.go (tagsFlag.Set) - // For compatibility with Go 1.12 and earlier, allow "-tags='a b c'" or even just "-tags='a'". - if strings.Contains(s, " ") || strings.Contains(s, "'") { - var err error - *v, err = splitQuotedFields(s) - if *v == nil { - *v = []string{} - } - return err - } - - // Starting in Go 1.13, the -tags flag is a comma-separated list of build tags. - *v = []string{} - for _, s := range strings.Split(s, ",") { - if s != "" { - *v = append(*v, s) - } - } - return nil -} - -func (v *TagsFlag) Get() interface{} { return *v } - -func splitQuotedFields(s string) ([]string, error) { - // See $GOROOT/src/cmd/internal/quoted/quoted.go (Split) - // This must remain in sync with that logic. - var f []string - for len(s) > 0 { - for len(s) > 0 && isSpaceByte(s[0]) { - s = s[1:] - } - if len(s) == 0 { - break - } - // Accepted quoted string. No unescaping inside. 
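
A short sketch of the overlay machinery deleted above: OverlayContext serves unsaved buffers through Context.OpenFile, and ParseOverlayArchive decodes the name/size/contents archive format its comment describes. The file paths are hypothetical:

package main

import (
    "fmt"
    "go/build"
    "io"
    "strings"

    "golang.org/x/tools/go/buildutil"
)

func main() {
    // Unsaved editor buffers take precedence over files on disk.
    overlay := map[string][]byte{
        "/home/me/src/foo/foo.go": []byte("package foo\n\nconst Answer = 42\n"),
    }
    ctxt := buildutil.OverlayContext(&build.Default, overlay)

    rc, err := buildutil.OpenFile(ctxt, "/home/me/src/foo/foo.go")
    if err != nil {
        fmt.Println(err)
        return
    }
    src, _ := io.ReadAll(rc)
    rc.Close()
    fmt.Printf("%s", src) // prints the overlaid buffer, not the on-disk file

    // The same idea in the archive format:
    // "<name>\n<decimal size>\n<contents>" per file, no newline after contents.
    archive := "/home/me/src/foo/foo.go\n12\npackage foo\n"
    if ov, err := buildutil.ParseOverlayArchive(strings.NewReader(archive)); err == nil {
        fmt.Println(len(ov), "overlaid file(s)")
    }
}
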
- if s[0] == '"' || s[0] == '\'' { - quote := s[0] - s = s[1:] - i := 0 - for i < len(s) && s[i] != quote { - i++ - } - if i >= len(s) { - return nil, fmt.Errorf("unterminated %c string", quote) - } - f = append(f, s[:i]) - s = s[i+1:] - continue - } - i := 0 - for i < len(s) && !isSpaceByte(s[i]) { - i++ - } - f = append(f, s[:i]) - s = s[i:] - } - return f, nil -} - -func (v *TagsFlag) String() string { - return "" -} - -func isSpaceByte(c byte) bool { - // See $GOROOT/src/cmd/internal/quoted/quoted.go (isSpaceByte, Split) - // This list must remain in sync with that. - return c == ' ' || c == '\t' || c == '\n' || c == '\r' -} diff --git a/vendor/golang.org/x/tools/go/buildutil/util.go b/vendor/golang.org/x/tools/go/buildutil/util.go deleted file mode 100644 index bee6390de..000000000 --- a/vendor/golang.org/x/tools/go/buildutil/util.go +++ /dev/null @@ -1,209 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package buildutil - -import ( - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - "io" - "io/ioutil" - "os" - "path" - "path/filepath" - "strings" -) - -// ParseFile behaves like parser.ParseFile, -// but uses the build context's file system interface, if any. -// -// If file is not absolute (as defined by IsAbsPath), the (dir, file) -// components are joined using JoinPath; dir must be absolute. -// -// The displayPath function, if provided, is used to transform the -// filename that will be attached to the ASTs. -// -// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws. -func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) { - if !IsAbsPath(ctxt, file) { - file = JoinPath(ctxt, dir, file) - } - rd, err := OpenFile(ctxt, file) - if err != nil { - return nil, err - } - defer rd.Close() // ignore error - if displayPath != nil { - file = displayPath(file) - } - return parser.ParseFile(fset, file, rd, mode) -} - -// ContainingPackage returns the package containing filename. -// -// If filename is not absolute, it is interpreted relative to working directory dir. -// All I/O is via the build context's file system interface, if any. -// -// The '...Files []string' fields of the resulting build.Package are not -// populated (build.FindOnly mode). -func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) { - if !IsAbsPath(ctxt, filename) { - filename = JoinPath(ctxt, dir, filename) - } - - // We must not assume the file tree uses - // "/" always, - // `\` always, - // or os.PathSeparator (which varies by platform), - // but to make any progress, we are forced to assume that - // paths will not use `\` unless the PathSeparator - // is also `\`, thus we can rely on filepath.ToSlash for some sanity. - - dirSlash := path.Dir(filepath.ToSlash(filename)) + "/" - - // We assume that no source root (GOPATH[i] or GOROOT) contains any other. - for _, srcdir := range ctxt.SrcDirs() { - srcdirSlash := filepath.ToSlash(srcdir) + "/" - if importPath, ok := HasSubdir(ctxt, srcdirSlash, dirSlash); ok { - return ctxt.Import(importPath, dir, build.FindOnly) - } - } - - return nil, fmt.Errorf("can't find package containing %s", filename) -} - -// -- Effective methods of file system interface ------------------------- - -// (go/build.Context defines these as methods, but does not export them.) 
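
The TagsFlag type removed above is wired into a flag set exactly as its doc comment shows; a minimal sketch, accepting both the comma-separated Go 1.13+ form and the legacy space-separated form:

package main

import (
    "flag"
    "fmt"
    "go/build"

    "golang.org/x/tools/go/buildutil"
)

func main() {
    // From the TagsFlag doc comment: populate build.Default.BuildTags from -tags.
    flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
    flag.Parse()

    // e.g. -tags=integration,json or -tags='integration json'
    fmt.Println("build tags:", build.Default.BuildTags)
}
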
- -// HasSubdir calls ctxt.HasSubdir (if not nil) or else uses -// the local file system to answer the question. -func HasSubdir(ctxt *build.Context, root, dir string) (rel string, ok bool) { - if f := ctxt.HasSubdir; f != nil { - return f(root, dir) - } - - // Try using paths we received. - if rel, ok = hasSubdir(root, dir); ok { - return - } - - // Try expanding symlinks and comparing - // expanded against unexpanded and - // expanded against expanded. - rootSym, _ := filepath.EvalSymlinks(root) - dirSym, _ := filepath.EvalSymlinks(dir) - - if rel, ok = hasSubdir(rootSym, dir); ok { - return - } - if rel, ok = hasSubdir(root, dirSym); ok { - return - } - return hasSubdir(rootSym, dirSym) -} - -func hasSubdir(root, dir string) (rel string, ok bool) { - const sep = string(filepath.Separator) - root = filepath.Clean(root) - if !strings.HasSuffix(root, sep) { - root += sep - } - - dir = filepath.Clean(dir) - if !strings.HasPrefix(dir, root) { - return "", false - } - - return filepath.ToSlash(dir[len(root):]), true -} - -// FileExists returns true if the specified file exists, -// using the build context's file system interface. -func FileExists(ctxt *build.Context, path string) bool { - if ctxt.OpenFile != nil { - r, err := ctxt.OpenFile(path) - if err != nil { - return false - } - r.Close() // ignore error - return true - } - _, err := os.Stat(path) - return err == nil -} - -// OpenFile behaves like os.Open, -// but uses the build context's file system interface, if any. -func OpenFile(ctxt *build.Context, path string) (io.ReadCloser, error) { - if ctxt.OpenFile != nil { - return ctxt.OpenFile(path) - } - return os.Open(path) -} - -// IsAbsPath behaves like filepath.IsAbs, -// but uses the build context's file system interface, if any. -func IsAbsPath(ctxt *build.Context, path string) bool { - if ctxt.IsAbsPath != nil { - return ctxt.IsAbsPath(path) - } - return filepath.IsAbs(path) -} - -// JoinPath behaves like filepath.Join, -// but uses the build context's file system interface, if any. -func JoinPath(ctxt *build.Context, path ...string) string { - if ctxt.JoinPath != nil { - return ctxt.JoinPath(path...) - } - return filepath.Join(path...) -} - -// IsDir behaves like os.Stat plus IsDir, -// but uses the build context's file system interface, if any. -func IsDir(ctxt *build.Context, path string) bool { - if ctxt.IsDir != nil { - return ctxt.IsDir(path) - } - fi, err := os.Stat(path) - return err == nil && fi.IsDir() -} - -// ReadDir behaves like ioutil.ReadDir, -// but uses the build context's file system interface, if any. -func ReadDir(ctxt *build.Context, path string) ([]os.FileInfo, error) { - if ctxt.ReadDir != nil { - return ctxt.ReadDir(path) - } - return ioutil.ReadDir(path) -} - -// SplitPathList behaves like filepath.SplitList, -// but uses the build context's file system interface, if any. -func SplitPathList(ctxt *build.Context, s string) []string { - if ctxt.SplitPathList != nil { - return ctxt.SplitPathList(s) - } - return filepath.SplitList(s) -} - -// sameFile returns true if x and y have the same basename and denote -// the same file. 
-func sameFile(x, y string) bool { - if path.Clean(x) == path.Clean(y) { - return true - } - if filepath.Base(x) == filepath.Base(y) { // (optimisation) - if xi, err := os.Stat(x); err == nil { - if yi, err := os.Stat(y); err == nil { - return os.SameFile(xi, yi) - } - } - } - return false -} diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go deleted file mode 100644 index 697974bb9..000000000 --- a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go +++ /dev/null @@ -1,219 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package cgo handles cgo preprocessing of files containing `import "C"`. -// -// DESIGN -// -// The approach taken is to run the cgo processor on the package's -// CgoFiles and parse the output, faking the filenames of the -// resulting ASTs so that the synthetic file containing the C types is -// called "C" (e.g. "~/go/src/net/C") and the preprocessed files -// have their original names (e.g. "~/go/src/net/cgo_unix.go"), -// not the names of the actual temporary files. -// -// The advantage of this approach is its fidelity to 'go build'. The -// downside is that the token.Position.Offset for each AST node is -// incorrect, being an offset within the temporary file. Line numbers -// should still be correct because of the //line comments. -// -// The logic of this file is mostly plundered from the 'go build' -// tool, which also invokes the cgo preprocessor. -// -// -// REJECTED ALTERNATIVE -// -// An alternative approach that we explored is to extend go/types' -// Importer mechanism to provide the identity of the importing package -// so that each time `import "C"` appears it resolves to a different -// synthetic package containing just the objects needed in that case. -// The loader would invoke cgo but parse only the cgo_types.go file -// defining the package-level objects, discarding the other files -// resulting from preprocessing. -// -// The benefit of this approach would have been that source-level -// syntax information would correspond exactly to the original cgo -// file, with no preprocessing involved, making source tools like -// godoc, guru, and eg happy. However, the approach was rejected -// due to the additional complexity it would impose on go/types. (It -// made for a beautiful demo, though.) -// -// cgo files, despite their *.go extension, are not legal Go source -// files per the specification since they may refer to unexported -// members of package "C" such as C.int. Also, a function such as -// C.getpwent has in effect two types, one matching its C type and one -// which additionally returns (errno C.int). The cgo preprocessor -// uses name mangling to distinguish these two functions in the -// processed code, but go/types would need to duplicate this logic in -// its handling of function calls, analogous to the treatment of map -// lookups in which y=m[k] and y,ok=m[k] are both legal. - -package cgo - -import ( - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - "log" - "os" - "os/exec" - "path/filepath" - "regexp" - "strings" -) - -// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses -// the output and returns the resulting ASTs. 
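
The util.go helpers deleted above wrap go/build's optional file-system hooks; a small sketch of ContainingPackage and ParseFile, assuming a hypothetical file that lives inside a GOPATH workspace:

package main

import (
    "fmt"
    "go/build"
    "go/parser"
    "go/token"

    "golang.org/x/tools/go/buildutil"
)

func main() {
    ctxt := &build.Default
    fset := token.NewFileSet()

    // Resolve the package containing a file (build.FindOnly: only path fields set).
    dir := "/home/me/go/src/example/tool" // hypothetical
    bp, err := buildutil.ContainingPackage(ctxt, dir, "main.go")
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println("import path:", bp.ImportPath)

    // Parse through the context's file-system interface, so overrides such as
    // OverlayContext's OpenFile are honoured.
    f, err := buildutil.ParseFile(fset, ctxt, nil, dir, "main.go", parser.PackageClauseOnly)
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println("package:", f.Name.Name)
}
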
-func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) { - tmpdir, err := os.MkdirTemp("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C") - if err != nil { - return nil, err - } - defer os.RemoveAll(tmpdir) - - pkgdir := bp.Dir - if DisplayPath != nil { - pkgdir = DisplayPath(pkgdir) - } - - cgoFiles, cgoDisplayFiles, err := Run(bp, pkgdir, tmpdir, false) - if err != nil { - return nil, err - } - var files []*ast.File - for i := range cgoFiles { - rd, err := os.Open(cgoFiles[i]) - if err != nil { - return nil, err - } - display := filepath.Join(bp.Dir, cgoDisplayFiles[i]) - f, err := parser.ParseFile(fset, display, rd, mode) - rd.Close() - if err != nil { - return nil, err - } - files = append(files, f) - } - return files, nil -} - -var cgoRe = regexp.MustCompile(`[/\\:]`) - -// Run invokes the cgo preprocessor on bp.CgoFiles and returns two -// lists of files: the resulting processed files (in temporary -// directory tmpdir) and the corresponding names of the unprocessed files. -// -// Run is adapted from (*builder).cgo in -// $GOROOT/src/cmd/go/build.go, but these features are unsupported: -// Objective C, CGOPKGPATH, CGO_FLAGS. -// -// If useabs is set to true, absolute paths of the bp.CgoFiles will be passed in -// to the cgo preprocessor. This in turn will set the // line comments -// referring to those files to use absolute paths. This is needed for -// go/packages using the legacy go list support so it is able to find -// the original files. -func Run(bp *build.Package, pkgdir, tmpdir string, useabs bool) (files, displayFiles []string, err error) { - cgoCPPFLAGS, _, _, _ := cflags(bp, true) - _, cgoexeCFLAGS, _, _ := cflags(bp, false) - - if len(bp.CgoPkgConfig) > 0 { - pcCFLAGS, err := pkgConfigFlags(bp) - if err != nil { - return nil, nil, err - } - cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...) - } - - // Allows including _cgo_export.h from .[ch] files in the package. - cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir) - - // _cgo_gotypes.go (displayed "C") contains the type definitions. - files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go")) - displayFiles = append(displayFiles, "C") - for _, fn := range bp.CgoFiles { - // "foo.cgo1.go" (displayed "foo.go") is the processed Go source. - f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_") - files = append(files, filepath.Join(tmpdir, f+"cgo1.go")) - displayFiles = append(displayFiles, fn) - } - - var cgoflags []string - if bp.Goroot && bp.ImportPath == "runtime/cgo" { - cgoflags = append(cgoflags, "-import_runtime_cgo=false") - } - if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" { - cgoflags = append(cgoflags, "-import_syscall=false") - } - - var cgoFiles []string = bp.CgoFiles - if useabs { - cgoFiles = make([]string, len(bp.CgoFiles)) - for i := range cgoFiles { - cgoFiles[i] = filepath.Join(pkgdir, bp.CgoFiles[i]) - } - } - - args := stringList( - "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--", - cgoCPPFLAGS, cgoexeCFLAGS, cgoFiles, - ) - if false { - log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir) - } - cmd := exec.Command(args[0], args[1:]...) 
- cmd.Dir = pkgdir - cmd.Env = append(os.Environ(), "PWD="+pkgdir) - cmd.Stdout = os.Stderr - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err) - } - - return files, displayFiles, nil -} - -// -- unmodified from 'go build' --------------------------------------- - -// Return the flags to use when invoking the C or C++ compilers, or cgo. -func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) { - var defaults string - if def { - defaults = "-g -O2" - } - - cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS) - cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS) - cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS) - ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS) - return -} - -// envList returns the value of the given environment variable broken -// into fields, using the default value when the variable is empty. -func envList(key, def string) []string { - v := os.Getenv(key) - if v == "" { - v = def - } - return strings.Fields(v) -} - -// stringList's arguments should be a sequence of string or []string values. -// stringList flattens them into a single []string. -func stringList(args ...interface{}) []string { - var x []string - for _, arg := range args { - switch arg := arg.(type) { - case []string: - x = append(x, arg...) - case string: - x = append(x, arg) - default: - panic("stringList: invalid argument") - } - } - return x -} diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go deleted file mode 100644 index 2455be54f..000000000 --- a/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cgo - -import ( - "errors" - "fmt" - "go/build" - "os/exec" - "strings" -) - -// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints. -func pkgConfig(mode string, pkgs []string) (flags []string, err error) { - cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...) - out, err := cmd.Output() - if err != nil { - s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err) - if len(out) > 0 { - s = fmt.Sprintf("%s: %s", s, out) - } - if err, ok := err.(*exec.ExitError); ok && len(err.Stderr) > 0 { - s = fmt.Sprintf("%s\nstderr:\n%s", s, err.Stderr) - } - return nil, errors.New(s) - } - if len(out) > 0 { - flags = strings.Fields(string(out)) - } - return -} - -// pkgConfigFlags calls pkg-config if needed and returns the cflags -// needed to build the package. -func pkgConfigFlags(p *build.Package) (cflags []string, err error) { - if len(p.CgoPkgConfig) == 0 { - return nil, nil - } - return pkgConfig("--cflags", p.CgoPkgConfig) -} diff --git a/vendor/golang.org/x/tools/go/loader/doc.go b/vendor/golang.org/x/tools/go/loader/doc.go deleted file mode 100644 index e35b1fd7d..000000000 --- a/vendor/golang.org/x/tools/go/loader/doc.go +++ /dev/null @@ -1,202 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package loader loads a complete Go program from source code, parsing -// and type-checking the initial packages plus their transitive closure -// of dependencies. 
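
The small pkg-config helper deleted above simply shells out and splits the output; a stand-alone sketch of the same idea, where the "libpng" module name is only an example:

package main

import (
    "fmt"
    "os/exec"
    "strings"
)

// cflagsFor mirrors the removed pkgConfig helper: ask pkg-config for the
// C compiler flags of the given modules and split them into fields.
func cflagsFor(modules ...string) ([]string, error) {
    out, err := exec.Command("pkg-config", append([]string{"--cflags"}, modules...)...).Output()
    if err != nil {
        return nil, fmt.Errorf("pkg-config failed: %w", err)
    }
    return strings.Fields(string(out)), nil
}

func main() {
    flags, err := cflagsFor("libpng")
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println(flags)
}
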
The ASTs and the derived facts are retained for -// later use. -// -// Deprecated: This is an older API and does not have support -// for modules. Use golang.org/x/tools/go/packages instead. -// -// The package defines two primary types: Config, which specifies a -// set of initial packages to load and various other options; and -// Program, which is the result of successfully loading the packages -// specified by a configuration. -// -// The configuration can be set directly, but *Config provides various -// convenience methods to simplify the common cases, each of which can -// be called any number of times. Finally, these are followed by a -// call to Load() to actually load and type-check the program. -// -// var conf loader.Config -// -// // Use the command-line arguments to specify -// // a set of initial packages to load from source. -// // See FromArgsUsage for help. -// rest, err := conf.FromArgs(os.Args[1:], wantTests) -// -// // Parse the specified files and create an ad hoc package with path "foo". -// // All files must have the same 'package' declaration. -// conf.CreateFromFilenames("foo", "foo.go", "bar.go") -// -// // Create an ad hoc package with path "foo" from -// // the specified already-parsed files. -// // All ASTs must have the same 'package' declaration. -// conf.CreateFromFiles("foo", parsedFiles) -// -// // Add "runtime" to the set of packages to be loaded. -// conf.Import("runtime") -// -// // Adds "fmt" and "fmt_test" to the set of packages -// // to be loaded. "fmt" will include *_test.go files. -// conf.ImportWithTests("fmt") -// -// // Finally, load all the packages specified by the configuration. -// prog, err := conf.Load() -// -// See examples_test.go for examples of API usage. -// -// # CONCEPTS AND TERMINOLOGY -// -// The WORKSPACE is the set of packages accessible to the loader. The -// workspace is defined by Config.Build, a *build.Context. The -// default context treats subdirectories of $GOROOT and $GOPATH as -// packages, but this behavior may be overridden. -// -// An AD HOC package is one specified as a set of source files on the -// command line. In the simplest case, it may consist of a single file -// such as $GOROOT/src/net/http/triv.go. -// -// EXTERNAL TEST packages are those comprised of a set of *_test.go -// files all with the same 'package foo_test' declaration, all in the -// same directory. (go/build.Package calls these files XTestFiles.) -// -// An IMPORTABLE package is one that can be referred to by some import -// spec. Every importable package is uniquely identified by its -// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json", -// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path -// typically denotes a subdirectory of the workspace. -// -// An import declaration uses an IMPORT PATH to refer to a package. -// Most import declarations use the package path as the import path. -// -// Due to VENDORING (https://golang.org/s/go15vendor), the -// interpretation of an import path may depend on the directory in which -// it appears. To resolve an import path to a package path, go/build -// must search the enclosing directories for a subdirectory named -// "vendor". -// -// ad hoc packages and external test packages are NON-IMPORTABLE. The -// path of an ad hoc package is inferred from the package -// declarations of its files and is therefore not a unique package key. -// For example, Config.CreatePkgs may specify two initial ad hoc -// packages, both with path "main". 
-// -// An AUGMENTED package is an importable package P plus all the -// *_test.go files with same 'package foo' declaration as P. -// (go/build.Package calls these files TestFiles.) -// -// The INITIAL packages are those specified in the configuration. A -// DEPENDENCY is a package loaded to satisfy an import in an initial -// package or another dependency. -package loader - -// IMPLEMENTATION NOTES -// -// 'go test', in-package test files, and import cycles -// --------------------------------------------------- -// -// An external test package may depend upon members of the augmented -// package that are not in the unaugmented package, such as functions -// that expose internals. (See bufio/export_test.go for an example.) -// So, the loader must ensure that for each external test package -// it loads, it also augments the corresponding non-test package. -// -// The import graph over n unaugmented packages must be acyclic; the -// import graph over n-1 unaugmented packages plus one augmented -// package must also be acyclic. ('go test' relies on this.) But the -// import graph over n augmented packages may contain cycles. -// -// First, all the (unaugmented) non-test packages and their -// dependencies are imported in the usual way; the loader reports an -// error if it detects an import cycle. -// -// Then, each package P for which testing is desired is augmented by -// the list P' of its in-package test files, by calling -// (*types.Checker).Files. This arrangement ensures that P' may -// reference definitions within P, but P may not reference definitions -// within P'. Furthermore, P' may import any other package, including -// ones that depend upon P, without an import cycle error. -// -// Consider two packages A and B, both of which have lists of -// in-package test files we'll call A' and B', and which have the -// following import graph edges: -// B imports A -// B' imports A -// A' imports B -// This last edge would be expected to create an error were it not -// for the special type-checking discipline above. -// Cycles of size greater than two are possible. For example: -// compress/bzip2/bzip2_test.go (package bzip2) imports "io/ioutil" -// io/ioutil/tempfile_test.go (package ioutil) imports "regexp" -// regexp/exec_test.go (package regexp) imports "compress/bzip2" -// -// -// Concurrency -// ----------- -// -// Let us define the import dependency graph as follows. Each node is a -// list of files passed to (Checker).Files at once. Many of these lists -// are the production code of an importable Go package, so those nodes -// are labelled by the package's path. The remaining nodes are -// ad hoc packages and lists of in-package *_test.go files that augment -// an importable package; those nodes have no label. -// -// The edges of the graph represent import statements appearing within a -// file. An edge connects a node (a list of files) to the node it -// imports, which is importable and thus always labelled. -// -// Loading is controlled by this dependency graph. -// -// To reduce I/O latency, we start loading a package's dependencies -// asynchronously as soon as we've parsed its files and enumerated its -// imports (scanImports). This performs a preorder traversal of the -// import dependency graph. -// -// To exploit hardware parallelism, we type-check unrelated packages in -// parallel, where "unrelated" means not ordered by the partial order of -// the import dependency graph. 
-// -// We use a concurrency-safe non-blocking cache (importer.imported) to -// record the results of type-checking, whether success or failure. An -// entry is created in this cache by startLoad the first time the -// package is imported. The first goroutine to request an entry becomes -// responsible for completing the task and broadcasting completion to -// subsequent requestors, which block until then. -// -// Type checking occurs in (parallel) postorder: we cannot type-check a -// set of files until we have loaded and type-checked all of their -// immediate dependencies (and thus all of their transitive -// dependencies). If the input were guaranteed free of import cycles, -// this would be trivial: we could simply wait for completion of the -// dependencies and then invoke the typechecker. -// -// But as we saw in the 'go test' section above, some cycles in the -// import graph over packages are actually legal, so long as the -// cycle-forming edge originates in the in-package test files that -// augment the package. This explains why the nodes of the import -// dependency graph are not packages, but lists of files: the unlabelled -// nodes avoid the cycles. Consider packages A and B where B imports A -// and A's in-package tests AT import B. The naively constructed import -// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but -// the graph over lists of files is AT --> B --> A, where AT is an -// unlabelled node. -// -// Awaiting completion of the dependencies in a cyclic graph would -// deadlock, so we must materialize the import dependency graph (as -// importer.graph) and check whether each import edge forms a cycle. If -// x imports y, and the graph already contains a path from y to x, then -// there is an import cycle, in which case the processing of x must not -// wait for the completion of processing of y. -// -// When the type-checker makes a callback (doImport) to the loader for a -// given import edge, there are two possible cases. In the normal case, -// the dependency has already been completely type-checked; doImport -// does a cache lookup and returns it. In the cyclic case, the entry in -// the cache is still necessarily incomplete, indicating a cycle. We -// perform the cycle check again to obtain the error message, and return -// the error. -// -// The result of using concurrency is about a 2.5x speedup for stdlib_test. diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go deleted file mode 100644 index 013c0f505..000000000 --- a/vendor/golang.org/x/tools/go/loader/loader.go +++ /dev/null @@ -1,1066 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package loader - -// See doc.go for package documentation and implementation notes. - -import ( - "errors" - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - "go/types" - "os" - "path/filepath" - "sort" - "strings" - "sync" - "time" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/internal/cgo" - "golang.org/x/tools/internal/versions" -) - -var ignoreVendor build.ImportMode - -const trace = false // show timing info for type-checking - -// Config specifies the configuration for loading a whole program from -// Go source code. -// The zero value for Config is a ready-to-use default configuration. -type Config struct { - // Fset is the file set for the parser to use when loading the - // program. 
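
The doc comment above walks through the whole Config, Load, Program flow of the (deprecated) loader package being unvendored here; condensed into one runnable sketch:

package main

import (
    "fmt"

    "golang.org/x/tools/go/loader"
)

func main() {
    var conf loader.Config

    // Load "fmt" from source, augmented by its in-package tests; the external
    // "fmt_test" package, if any, is appended to prog.Created.
    conf.ImportWithTests("fmt")

    prog, err := conf.Load()
    if err != nil {
        fmt.Println(err)
        return
    }

    for _, info := range prog.InitialPackages() {
        fmt.Println(info.Pkg.Path(), "files:", len(info.Files), "errors:", len(info.Errors))
    }
}
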
If nil, it may be lazily initialized by any - // method of Config. - Fset *token.FileSet - - // ParserMode specifies the mode to be used by the parser when - // loading source packages. - ParserMode parser.Mode - - // TypeChecker contains options relating to the type checker. - // - // The supplied IgnoreFuncBodies is not used; the effective - // value comes from the TypeCheckFuncBodies func below. - // The supplied Import function is not used either. - TypeChecker types.Config - - // TypeCheckFuncBodies is a predicate over package paths. - // A package for which the predicate is false will - // have its package-level declarations type checked, but not - // its function bodies; this can be used to quickly load - // dependencies from source. If nil, all func bodies are type - // checked. - TypeCheckFuncBodies func(path string) bool - - // If Build is non-nil, it is used to locate source packages. - // Otherwise &build.Default is used. - // - // By default, cgo is invoked to preprocess Go files that - // import the fake package "C". This behaviour can be - // disabled by setting CGO_ENABLED=0 in the environment prior - // to startup, or by setting Build.CgoEnabled=false. - Build *build.Context - - // The current directory, used for resolving relative package - // references such as "./go/loader". If empty, os.Getwd will be - // used instead. - Cwd string - - // If DisplayPath is non-nil, it is used to transform each - // file name obtained from Build.Import(). This can be used - // to prevent a virtualized build.Config's file names from - // leaking into the user interface. - DisplayPath func(path string) string - - // If AllowErrors is true, Load will return a Program even - // if some of the its packages contained I/O, parser or type - // errors; such errors are accessible via PackageInfo.Errors. If - // false, Load will fail if any package had an error. - AllowErrors bool - - // CreatePkgs specifies a list of non-importable initial - // packages to create. The resulting packages will appear in - // the corresponding elements of the Program.Created slice. - CreatePkgs []PkgSpec - - // ImportPkgs specifies a set of initial packages to load. - // The map keys are package paths. - // - // The map value indicates whether to load tests. If true, Load - // will add and type-check two lists of files to the package: - // non-test files followed by in-package *_test.go files. In - // addition, it will append the external test package (if any) - // to Program.Created. - ImportPkgs map[string]bool - - // FindPackage is called during Load to create the build.Package - // for a given import path from a given directory. - // If FindPackage is nil, (*build.Context).Import is used. - // A client may use this hook to adapt to a proprietary build - // system that does not follow the "go build" layout - // conventions, for example. - // - // It must be safe to call concurrently from multiple goroutines. - FindPackage func(ctxt *build.Context, importPath, fromDir string, mode build.ImportMode) (*build.Package, error) - - // AfterTypeCheck is called immediately after a list of files - // has been type-checked and appended to info.Files. - // - // This optional hook function is the earliest opportunity for - // the client to observe the output of the type checker, - // which may be useful to reduce analysis latency when loading - // a large program. 
- // - // The function is permitted to modify info.Info, for instance - // to clear data structures that are no longer needed, which can - // dramatically reduce peak memory consumption. - // - // The function may be called twice for the same PackageInfo: - // once for the files of the package and again for the - // in-package test files. - // - // It must be safe to call concurrently from multiple goroutines. - AfterTypeCheck func(info *PackageInfo, files []*ast.File) -} - -// A PkgSpec specifies a non-importable package to be created by Load. -// Files are processed first, but typically only one of Files and -// Filenames is provided. The path needn't be globally unique. -// -// For vendoring purposes, the package's directory is the one that -// contains the first file. -type PkgSpec struct { - Path string // package path ("" => use package declaration) - Files []*ast.File // ASTs of already-parsed files - Filenames []string // names of files to be parsed -} - -// A Program is a Go program loaded from source as specified by a Config. -type Program struct { - Fset *token.FileSet // the file set for this program - - // Created[i] contains the initial package whose ASTs or - // filenames were supplied by Config.CreatePkgs[i], followed by - // the external test package, if any, of each package in - // Config.ImportPkgs ordered by ImportPath. - // - // NOTE: these files must not import "C". Cgo preprocessing is - // only performed on imported packages, not ad hoc packages. - // - // TODO(adonovan): we need to copy and adapt the logic of - // goFilesPackage (from $GOROOT/src/cmd/go/build.go) and make - // Config.Import and Config.Create methods return the same kind - // of entity, essentially a build.Package. - // Perhaps we can even reuse that type directly. - Created []*PackageInfo - - // Imported contains the initially imported packages, - // as specified by Config.ImportPkgs. - Imported map[string]*PackageInfo - - // AllPackages contains the PackageInfo of every package - // encountered by Load: all initial packages and all - // dependencies, including incomplete ones. - AllPackages map[*types.Package]*PackageInfo - - // importMap is the canonical mapping of package paths to - // packages. It contains all Imported initial packages, but not - // Created ones, and all imported dependencies. - importMap map[string]*types.Package -} - -// PackageInfo holds the ASTs and facts derived by the type-checker -// for a single package. -// -// Not mutated once exposed via the API. -type PackageInfo struct { - Pkg *types.Package - Importable bool // true if 'import "Pkg.Path()"' would resolve to this - TransitivelyErrorFree bool // true if Pkg and all its dependencies are free of errors - Files []*ast.File // syntax trees for the package's files - Errors []error // non-nil if the package had errors - types.Info // type-checker deductions. - dir string // package directory - - checker *types.Checker // transient type-checker state - errorFunc func(error) -} - -func (info *PackageInfo) String() string { return info.Pkg.Path() } - -func (info *PackageInfo) appendError(err error) { - if info.errorFunc != nil { - info.errorFunc(err) - } else { - fmt.Fprintln(os.Stderr, err) - } - info.Errors = append(info.Errors, err) -} - -func (conf *Config) fset() *token.FileSet { - if conf.Fset == nil { - conf.Fset = token.NewFileSet() - } - return conf.Fset -} - -// ParseFile is a convenience function (intended for testing) that invokes -// the parser using the Config's FileSet, which is initialized if nil. 
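
The TypeCheckFuncBodies and AfterTypeCheck fields described above are the main latency and memory knobs; a small sketch using a hypothetical initial package path:

package main

import (
    "fmt"
    "go/ast"

    "golang.org/x/tools/go/loader"
)

func main() {
    conf := loader.Config{
        // Only type-check function bodies of the initial package; dependencies
        // get package-level declarations only.
        TypeCheckFuncBodies: func(path string) bool {
            return path == "example.com/mytool" // hypothetical
        },
        // Observe each package as soon as its files are type-checked.
        AfterTypeCheck: func(info *loader.PackageInfo, files []*ast.File) {
            fmt.Println("type-checked:", info.Pkg.Path(), len(files), "file(s)")
        },
    }
    conf.Import("example.com/mytool")

    if _, err := conf.Load(); err != nil {
        fmt.Println("load failed:", err)
    }
}
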
-// -// src specifies the parser input as a string, []byte, or io.Reader, and -// filename is its apparent name. If src is nil, the contents of -// filename are read from the file system. -func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) { - // TODO(adonovan): use conf.build() etc like parseFiles does. - return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode) -} - -// FromArgsUsage is a partial usage message that applications calling -// FromArgs may wish to include in their -help output. -const FromArgsUsage = ` - is a list of arguments denoting a set of initial packages. -It may take one of two forms: - -1. A list of *.go source files. - - All of the specified files are loaded, parsed and type-checked - as a single package. All the files must belong to the same directory. - -2. A list of import paths, each denoting a package. - - The package's directory is found relative to the $GOROOT and - $GOPATH using similar logic to 'go build', and the *.go files in - that directory are loaded, parsed and type-checked as a single - package. - - In addition, all *_test.go files in the directory are then loaded - and parsed. Those files whose package declaration equals that of - the non-*_test.go files are included in the primary package. Test - files whose package declaration ends with "_test" are type-checked - as another package, the 'external' test package, so that a single - import path may denote two packages. (Whether this behaviour is - enabled is tool-specific, and may depend on additional flags.) - -A '--' argument terminates the list of packages. -` - -// FromArgs interprets args as a set of initial packages to load from -// source and updates the configuration. It returns the list of -// unconsumed arguments. -// -// It is intended for use in command-line interfaces that require a -// set of initial packages to be specified; see FromArgsUsage message -// for details. -// -// Only superficial errors are reported at this stage; errors dependent -// on I/O are detected during Load. -func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) { - var rest []string - for i, arg := range args { - if arg == "--" { - rest = args[i+1:] - args = args[:i] - break // consume "--" and return the remaining args - } - } - - if len(args) > 0 && strings.HasSuffix(args[0], ".go") { - // Assume args is a list of a *.go files - // denoting a single ad hoc package. - for _, arg := range args { - if !strings.HasSuffix(arg, ".go") { - return nil, fmt.Errorf("named files must be .go files: %s", arg) - } - } - conf.CreateFromFilenames("", args...) - } else { - // Assume args are directories each denoting a - // package and (perhaps) an external test, iff xtest. - for _, arg := range args { - if xtest { - conf.ImportWithTests(arg) - } else { - conf.Import(arg) - } - } - } - - return rest, nil -} - -// CreateFromFilenames is a convenience function that adds -// a conf.CreatePkgs entry to create a package of the specified *.go -// files. -func (conf *Config) CreateFromFilenames(path string, filenames ...string) { - conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames}) -} - -// CreateFromFiles is a convenience function that adds a conf.CreatePkgs -// entry to create package of the specified path and parsed files. 
-func (conf *Config) CreateFromFiles(path string, files ...*ast.File) { - conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files}) -} - -// ImportWithTests is a convenience function that adds path to -// ImportPkgs, the set of initial source packages located relative to -// $GOPATH. The package will be augmented by any *_test.go files in -// its directory that contain a "package x" (not "package x_test") -// declaration. -// -// In addition, if any *_test.go files contain a "package x_test" -// declaration, an additional package comprising just those files will -// be added to CreatePkgs. -func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) } - -// Import is a convenience function that adds path to ImportPkgs, the -// set of initial packages that will be imported from source. -func (conf *Config) Import(path string) { conf.addImport(path, false) } - -func (conf *Config) addImport(path string, tests bool) { - if path == "C" { - return // ignore; not a real package - } - if conf.ImportPkgs == nil { - conf.ImportPkgs = make(map[string]bool) - } - conf.ImportPkgs[path] = conf.ImportPkgs[path] || tests -} - -// PathEnclosingInterval returns the PackageInfo and ast.Node that -// contain source interval [start, end), and all the node's ancestors -// up to the AST root. It searches all ast.Files of all packages in prog. -// exact is defined as for astutil.PathEnclosingInterval. -// -// The zero value is returned if not found. -func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) { - for _, info := range prog.AllPackages { - for _, f := range info.Files { - if f.Pos() == token.NoPos { - // This can happen if the parser saw - // too many errors and bailed out. - // (Use parser.AllErrors to prevent that.) - continue - } - if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) { - continue - } - if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil { - return info, path, exact - } - } - } - return nil, nil, false -} - -// InitialPackages returns a new slice containing the set of initial -// packages (Created + Imported) in unspecified order. -func (prog *Program) InitialPackages() []*PackageInfo { - infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported)) - infos = append(infos, prog.Created...) - for _, info := range prog.Imported { - infos = append(infos, info) - } - return infos -} - -// Package returns the ASTs and results of type checking for the -// specified package. -func (prog *Program) Package(path string) *PackageInfo { - if info, ok := prog.AllPackages[prog.importMap[path]]; ok { - return info - } - for _, info := range prog.Created { - if path == info.Pkg.Path() { - return info - } - } - return nil -} - -// ---------- Implementation ---------- - -// importer holds the working state of the algorithm. -type importer struct { - conf *Config // the client configuration - start time.Time // for logging - - progMu sync.Mutex // guards prog - prog *Program // the resulting program - - // findpkg is a memoization of FindPackage. - findpkgMu sync.Mutex // guards findpkg - findpkg map[findpkgKey]*findpkgValue - - importedMu sync.Mutex // guards imported - imported map[string]*importInfo // all imported packages (incl. failures) by import path - - // import dependency graph: graph[x][y] => x imports y - // - // Since non-importable packages cannot be cyclic, we ignore - // their imports, thus we only need the subgraph over importable - // packages. 
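
PathEnclosingInterval, shown above, is what position-based tools built on the loader use to map a source offset back to syntax; a rough sketch, assuming a hypothetical demo.go in the current directory:

package main

import (
    "fmt"

    "golang.org/x/tools/go/loader"
)

func main() {
    var conf loader.Config
    conf.CreateFromFilenames("demo", "demo.go") // hypothetical ad hoc package
    prog, err := conf.Load()
    if err != nil {
        fmt.Println(err)
        return
    }

    files := prog.Created[0].Files
    if len(files) == 0 {
        return
    }
    pos := files[0].Package // position of the "package" keyword
    info, path, exact := prog.PathEnclosingInterval(pos, pos)
    if info != nil {
        fmt.Printf("in %s: %d enclosing node(s), exact=%v\n", info.Pkg.Path(), len(path), exact)
    }
}
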
Nodes are identified by their import paths. - graphMu sync.Mutex - graph map[string]map[string]bool -} - -type findpkgKey struct { - importPath string - fromDir string - mode build.ImportMode -} - -type findpkgValue struct { - ready chan struct{} // closed to broadcast readiness - bp *build.Package - err error -} - -// importInfo tracks the success or failure of a single import. -// -// Upon completion, exactly one of info and err is non-nil: -// info on successful creation of a package, err otherwise. -// A successful package may still contain type errors. -type importInfo struct { - path string // import path - info *PackageInfo // results of typechecking (including errors) - complete chan struct{} // closed to broadcast that info is set. -} - -// awaitCompletion blocks until ii is complete, -// i.e. the info field is safe to inspect. -func (ii *importInfo) awaitCompletion() { - <-ii.complete // wait for close -} - -// Complete marks ii as complete. -// Its info and err fields will not be subsequently updated. -func (ii *importInfo) Complete(info *PackageInfo) { - if info == nil { - panic("info == nil") - } - ii.info = info - close(ii.complete) -} - -type importError struct { - path string // import path - err error // reason for failure to create a package -} - -// Load creates the initial packages specified by conf.{Create,Import}Pkgs, -// loading their dependencies packages as needed. -// -// On success, Load returns a Program containing a PackageInfo for -// each package. On failure, it returns an error. -// -// If AllowErrors is true, Load will return a Program even if some -// packages contained I/O, parser or type errors, or if dependencies -// were missing. (Such errors are accessible via PackageInfo.Errors. If -// false, Load will fail if any package had an error. -// -// It is an error if no packages were loaded. -func (conf *Config) Load() (*Program, error) { - // Create a simple default error handler for parse/type errors. - if conf.TypeChecker.Error == nil { - conf.TypeChecker.Error = func(e error) { fmt.Fprintln(os.Stderr, e) } - } - - // Set default working directory for relative package references. - if conf.Cwd == "" { - var err error - conf.Cwd, err = os.Getwd() - if err != nil { - return nil, err - } - } - - // Install default FindPackage hook using go/build logic. - if conf.FindPackage == nil { - conf.FindPackage = (*build.Context).Import - } - - prog := &Program{ - Fset: conf.fset(), - Imported: make(map[string]*PackageInfo), - importMap: make(map[string]*types.Package), - AllPackages: make(map[*types.Package]*PackageInfo), - } - - imp := importer{ - conf: conf, - prog: prog, - findpkg: make(map[findpkgKey]*findpkgValue), - imported: make(map[string]*importInfo), - start: time.Now(), - graph: make(map[string]map[string]bool), - } - - // -- loading proper (concurrent phase) -------------------------------- - - var errpkgs []string // packages that contained errors - - // Load the initially imported packages and their dependencies, - // in parallel. - // No vendor check on packages imported from the command line. - infos, importErrors := imp.importAll("", conf.Cwd, conf.ImportPkgs, ignoreVendor) - for _, ie := range importErrors { - conf.TypeChecker.Error(ie.err) // failed to create package - errpkgs = append(errpkgs, ie.path) - } - for _, info := range infos { - prog.Imported[info.Pkg.Path()] = info - } - - // Augment the designated initial packages by their tests. - // Dependencies are loaded in parallel. 
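
Both findpkgValue and importInfo above use the same broadcast idiom: the first requester of a key computes the entry and closes a "ready" channel so every later requester can block cheaply until it is set. A generic, stand-alone sketch of that idiom, with illustrative names:

package main

import (
    "fmt"
    "sync"
)

type entry struct {
    ready chan struct{} // closed once value is set
    value string
}

type cache struct {
    mu sync.Mutex
    m  map[string]*entry
}

func (c *cache) get(key string, compute func(string) string) string {
    c.mu.Lock()
    e, ok := c.m[key]
    if !ok {
        // Cache miss: this goroutine computes the value and broadcasts readiness.
        e = &entry{ready: make(chan struct{})}
        c.m[key] = e
        c.mu.Unlock()
        e.value = compute(key)
        close(e.ready)
    } else {
        // Cache hit: wait for the owner to finish.
        c.mu.Unlock()
        <-e.ready
    }
    return e.value
}

func main() {
    c := &cache{m: make(map[string]*entry)}
    var wg sync.WaitGroup
    for i := 0; i < 4; i++ {
        wg.Add(1)
        go func() {
            defer wg.Done()
            fmt.Println(c.get("fmt", func(k string) string { return "loaded " + k }))
        }()
    }
    wg.Wait()
}
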
- var xtestPkgs []*build.Package - for importPath, augment := range conf.ImportPkgs { - if !augment { - continue - } - - // No vendor check on packages imported from command line. - bp, err := imp.findPackage(importPath, conf.Cwd, ignoreVendor) - if err != nil { - // Package not found, or can't even parse package declaration. - // Already reported by previous loop; ignore it. - continue - } - - // Needs external test package? - if len(bp.XTestGoFiles) > 0 { - xtestPkgs = append(xtestPkgs, bp) - } - - // Consult the cache using the canonical package path. - path := bp.ImportPath - imp.importedMu.Lock() // (unnecessary, we're sequential here) - ii, ok := imp.imported[path] - // Paranoid checks added due to issue #11012. - if !ok { - // Unreachable. - // The previous loop called importAll and thus - // startLoad for each path in ImportPkgs, which - // populates imp.imported[path] with a non-zero value. - panic(fmt.Sprintf("imported[%q] not found", path)) - } - if ii == nil { - // Unreachable. - // The ii values in this loop are the same as in - // the previous loop, which enforced the invariant - // that at least one of ii.err and ii.info is non-nil. - panic(fmt.Sprintf("imported[%q] == nil", path)) - } - if ii.info == nil { - // Unreachable. - // awaitCompletion has the postcondition - // ii.info != nil. - panic(fmt.Sprintf("imported[%q].info = nil", path)) - } - info := ii.info - imp.importedMu.Unlock() - - // Parse the in-package test files. - files, errs := imp.conf.parsePackageFiles(bp, 't') - for _, err := range errs { - info.appendError(err) - } - - // The test files augmenting package P cannot be imported, - // but may import packages that import P, - // so we must disable the cycle check. - imp.addFiles(info, files, false) - } - - createPkg := func(path, dir string, files []*ast.File, errs []error) { - info := imp.newPackageInfo(path, dir) - for _, err := range errs { - info.appendError(err) - } - - // Ad hoc packages are non-importable, - // so no cycle check is needed. - // addFiles loads dependencies in parallel. - imp.addFiles(info, files, false) - prog.Created = append(prog.Created, info) - } - - // Create packages specified by conf.CreatePkgs. - for _, cp := range conf.CreatePkgs { - files, errs := parseFiles(conf.fset(), conf.build(), nil, conf.Cwd, cp.Filenames, conf.ParserMode) - files = append(files, cp.Files...) - - path := cp.Path - if path == "" { - if len(files) > 0 { - path = files[0].Name.Name - } else { - path = "(unnamed)" - } - } - - dir := conf.Cwd - if len(files) > 0 && files[0].Pos().IsValid() { - dir = filepath.Dir(conf.fset().File(files[0].Pos()).Name()) - } - createPkg(path, dir, files, errs) - } - - // Create external test packages. - sort.Sort(byImportPath(xtestPkgs)) - for _, bp := range xtestPkgs { - files, errs := imp.conf.parsePackageFiles(bp, 'x') - createPkg(bp.ImportPath+"_test", bp.Dir, files, errs) - } - - // -- finishing up (sequential) ---------------------------------------- - - if len(prog.Imported)+len(prog.Created) == 0 { - return nil, errors.New("no initial packages were loaded") - } - - // Create infos for indirectly imported packages. - // e.g. incomplete packages without syntax, loaded from export data. - for _, obj := range prog.importMap { - info := prog.AllPackages[obj] - if info == nil { - prog.AllPackages[obj] = &PackageInfo{Pkg: obj, Importable: true} - } else { - // finished - info.checker = nil - info.errorFunc = nil - } - } - - if !conf.AllowErrors { - // Report errors in indirectly imported packages. 
- for _, info := range prog.AllPackages { - if len(info.Errors) > 0 { - errpkgs = append(errpkgs, info.Pkg.Path()) - } - } - if errpkgs != nil { - var more string - if len(errpkgs) > 3 { - more = fmt.Sprintf(" and %d more", len(errpkgs)-3) - errpkgs = errpkgs[:3] - } - return nil, fmt.Errorf("couldn't load packages due to errors: %s%s", - strings.Join(errpkgs, ", "), more) - } - } - - markErrorFreePackages(prog.AllPackages) - - return prog, nil -} - -type byImportPath []*build.Package - -func (b byImportPath) Len() int { return len(b) } -func (b byImportPath) Less(i, j int) bool { return b[i].ImportPath < b[j].ImportPath } -func (b byImportPath) Swap(i, j int) { b[i], b[j] = b[j], b[i] } - -// markErrorFreePackages sets the TransitivelyErrorFree flag on all -// applicable packages. -func markErrorFreePackages(allPackages map[*types.Package]*PackageInfo) { - // Build the transpose of the import graph. - importedBy := make(map[*types.Package]map[*types.Package]bool) - for P := range allPackages { - for _, Q := range P.Imports() { - clients, ok := importedBy[Q] - if !ok { - clients = make(map[*types.Package]bool) - importedBy[Q] = clients - } - clients[P] = true - } - } - - // Find all packages reachable from some error package. - reachable := make(map[*types.Package]bool) - var visit func(*types.Package) - visit = func(p *types.Package) { - if !reachable[p] { - reachable[p] = true - for q := range importedBy[p] { - visit(q) - } - } - } - for _, info := range allPackages { - if len(info.Errors) > 0 { - visit(info.Pkg) - } - } - - // Mark the others as "transitively error-free". - for _, info := range allPackages { - if !reachable[info.Pkg] { - info.TransitivelyErrorFree = true - } - } -} - -// build returns the effective build context. -func (conf *Config) build() *build.Context { - if conf.Build != nil { - return conf.Build - } - return &build.Default -} - -// parsePackageFiles enumerates the files belonging to package path, -// then loads, parses and returns them, plus a list of I/O or parse -// errors that were encountered. -// -// 'which' indicates which files to include: -// -// 'g': include non-test *.go source files (GoFiles + processed CgoFiles) -// 't': include in-package *_test.go source files (TestGoFiles) -// 'x': include external *_test.go source files. (XTestGoFiles) -func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) { - if bp.ImportPath == "unsafe" { - return nil, nil - } - var filenames []string - switch which { - case 'g': - filenames = bp.GoFiles - case 't': - filenames = bp.TestGoFiles - case 'x': - filenames = bp.XTestGoFiles - default: - panic(which) - } - - files, errs := parseFiles(conf.fset(), conf.build(), conf.DisplayPath, bp.Dir, filenames, conf.ParserMode) - - // Preprocess CgoFiles and parse the outputs (sequentially). - if which == 'g' && bp.CgoFiles != nil { - cgofiles, err := cgo.ProcessFiles(bp, conf.fset(), conf.DisplayPath, conf.ParserMode) - if err != nil { - errs = append(errs, err) - } else { - files = append(files, cgofiles...) - } - } - - return files, errs -} - -// doImport imports the package denoted by path. -// It implements the types.Importer signature. -// -// It returns an error if a package could not be created -// (e.g. go/build or parse error), but type errors are reported via -// the types.Config.Error callback (the first of which is also saved -// in the package's PackageInfo). -// -// Idempotent. 
-func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) {
-	if to == "C" {
-		// This should be unreachable, but ad hoc packages are
-		// not currently subject to cgo preprocessing.
-		// See https://golang.org/issue/11627.
-		return nil, fmt.Errorf(`the loader doesn't cgo-process ad hoc packages like %q; see Go issue 11627`,
-			from.Pkg.Path())
-	}
-
-	bp, err := imp.findPackage(to, from.dir, 0)
-	if err != nil {
-		return nil, err
-	}
-
-	// The standard unsafe package is handled specially,
-	// and has no PackageInfo.
-	if bp.ImportPath == "unsafe" {
-		return types.Unsafe, nil
-	}
-
-	// Look for the package in the cache using its canonical path.
-	path := bp.ImportPath
-	imp.importedMu.Lock()
-	ii := imp.imported[path]
-	imp.importedMu.Unlock()
-	if ii == nil {
-		panic("internal error: unexpected import: " + path)
-	}
-	if ii.info != nil {
-		return ii.info.Pkg, nil
-	}
-
-	// Import of incomplete package: this indicates a cycle.
-	fromPath := from.Pkg.Path()
-	if cycle := imp.findPath(path, fromPath); cycle != nil {
-		// Normalize cycle: start from alphabetically largest node.
-		pos, start := -1, ""
-		for i, s := range cycle {
-			if pos < 0 || s > start {
-				pos, start = i, s
-			}
-		}
-		cycle = append(cycle, cycle[:pos]...)[pos:] // rotate cycle to start from largest
-		cycle = append(cycle, cycle[0])             // add start node to end to show cycliness
-		return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> "))
-	}
-
-	panic("internal error: import of incomplete (yet acyclic) package: " + fromPath)
-}
-
-// findPackage locates the package denoted by the importPath in the
-// specified directory.
-func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMode) (*build.Package, error) {
-	// We use a non-blocking duplicate-suppressing cache (gopl.io §9.7)
-	// to avoid holding the lock around FindPackage.
-	key := findpkgKey{importPath, fromDir, mode}
-	imp.findpkgMu.Lock()
-	v, ok := imp.findpkg[key]
-	if ok {
-		// cache hit
-		imp.findpkgMu.Unlock()
-
-		<-v.ready // wait for entry to become ready
-	} else {
-		// Cache miss: this goroutine becomes responsible for
-		// populating the map entry and broadcasting its readiness.
-		v = &findpkgValue{ready: make(chan struct{})}
-		imp.findpkg[key] = v
-		imp.findpkgMu.Unlock()
-
-		ioLimit <- true
-		v.bp, v.err = imp.conf.FindPackage(imp.conf.build(), importPath, fromDir, mode)
-		<-ioLimit
-
-		if _, ok := v.err.(*build.NoGoError); ok {
-			v.err = nil // empty directory is not an error
-		}
-
-		close(v.ready) // broadcast ready condition
-	}
-	return v.bp, v.err
-}
-
-// importAll loads, parses, and type-checks the specified packages in
-// parallel and returns their completed importInfos in unspecified order.
-//
-// fromPath is the package path of the importing package, if it is
-// importable, "" otherwise. It is used for cycle detection.
-//
-// fromDir is the directory containing the import declaration that
-// caused these imports.
-func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) {
-	if fromPath != "" {
-		// We're loading a set of imports.
-		//
-		// We must record graph edges from the importing package
-		// to its dependencies, and check for cycles.
-		imp.graphMu.Lock()
-		deps, ok := imp.graph[fromPath]
-		if !ok {
-			deps = make(map[string]bool)
-			imp.graph[fromPath] = deps
-		}
-		for importPath := range imports {
-			deps[importPath] = true
-		}
-		imp.graphMu.Unlock()
-	}
-
-	var pending []*importInfo
-	for importPath := range imports {
-		if fromPath != "" {
-			if cycle := imp.findPath(importPath, fromPath); cycle != nil {
-				// Cycle-forming import: we must not check it
-				// since it would deadlock.
-				if trace {
-					fmt.Fprintf(os.Stderr, "import cycle: %q\n", cycle)
-				}
-				continue
-			}
-		}
-		bp, err := imp.findPackage(importPath, fromDir, mode)
-		if err != nil {
-			errors = append(errors, importError{
-				path: importPath,
-				err:  err,
-			})
-			continue
-		}
-		pending = append(pending, imp.startLoad(bp))
-	}
-
-	for _, ii := range pending {
-		ii.awaitCompletion()
-		infos = append(infos, ii.info)
-	}
-
-	return infos, errors
-}
-
-// findPath returns an arbitrary path from 'from' to 'to' in the import
-// graph, or nil if there was none.
-func (imp *importer) findPath(from, to string) []string {
-	imp.graphMu.Lock()
-	defer imp.graphMu.Unlock()
-
-	seen := make(map[string]bool)
-	var search func(stack []string, importPath string) []string
-	search = func(stack []string, importPath string) []string {
-		if !seen[importPath] {
-			seen[importPath] = true
-			stack = append(stack, importPath)
-			if importPath == to {
-				return stack
-			}
-			for x := range imp.graph[importPath] {
-				if p := search(stack, x); p != nil {
-					return p
-				}
-			}
-		}
-		return nil
-	}
-	return search(make([]string, 0, 20), from)
-}
-
-// startLoad initiates the loading, parsing and type-checking of the
-// specified package and its dependencies, if it has not already begun.
-//
-// It returns an importInfo, not necessarily in a completed state. The
-// caller must call awaitCompletion() before accessing its info field.
-//
-// startLoad is concurrency-safe and idempotent.
-func (imp *importer) startLoad(bp *build.Package) *importInfo {
-	path := bp.ImportPath
-	imp.importedMu.Lock()
-	ii, ok := imp.imported[path]
-	if !ok {
-		ii = &importInfo{path: path, complete: make(chan struct{})}
-		imp.imported[path] = ii
-		go func() {
-			info := imp.load(bp)
-			ii.Complete(info)
-		}()
-	}
-	imp.importedMu.Unlock()
-
-	return ii
-}
-
-// load implements package loading by parsing Go source files
-// located by go/build.
-func (imp *importer) load(bp *build.Package) *PackageInfo {
-	info := imp.newPackageInfo(bp.ImportPath, bp.Dir)
-	info.Importable = true
-	files, errs := imp.conf.parsePackageFiles(bp, 'g')
-	for _, err := range errs {
-		info.appendError(err)
-	}
-
-	imp.addFiles(info, files, true)
-
-	imp.progMu.Lock()
-	imp.prog.importMap[bp.ImportPath] = info.Pkg
-	imp.progMu.Unlock()
-
-	return info
-}
-
-// addFiles adds and type-checks the specified files to info, loading
-// their dependencies if needed. The order of files determines the
-// package initialization order. It may be called multiple times on the
-// same package. Errors are appended to the info.Errors field.
-//
-// cycleCheck determines whether the imports within files create
-// dependency edges that should be checked for potential cycles.
-func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) {
-	// Ensure the dependencies are loaded, in parallel.
-	var fromPath string
-	if cycleCheck {
-		fromPath = info.Pkg.Path()
-	}
-	// TODO(adonovan): opt: make the caller do scanImports.
-	// Callers with a build.Package can skip it.
-	imp.importAll(fromPath, info.dir, scanImports(files), 0)
-
-	if trace {
-		fmt.Fprintf(os.Stderr, "%s: start %q (%d)\n",
-			time.Since(imp.start), info.Pkg.Path(), len(files))
-	}
-
-	// Don't call checker.Files on Unsafe, even with zero files,
-	// because it would mutate the package, which is a global.
-	if info.Pkg == types.Unsafe {
-		if len(files) > 0 {
-			panic(`"unsafe" package contains unexpected files`)
-		}
-	} else {
-		// Ignore the returned (first) error since we
-		// already collect them all in the PackageInfo.
-		info.checker.Files(files)
-		info.Files = append(info.Files, files...)
-	}
-
-	if imp.conf.AfterTypeCheck != nil {
-		imp.conf.AfterTypeCheck(info, files)
-	}
-
-	if trace {
-		fmt.Fprintf(os.Stderr, "%s: stop %q\n",
-			time.Since(imp.start), info.Pkg.Path())
-	}
-}
-
-func (imp *importer) newPackageInfo(path, dir string) *PackageInfo {
-	var pkg *types.Package
-	if path == "unsafe" {
-		pkg = types.Unsafe
-	} else {
-		pkg = types.NewPackage(path, "")
-	}
-	info := &PackageInfo{
-		Pkg: pkg,
-		Info: types.Info{
-			Types:      make(map[ast.Expr]types.TypeAndValue),
-			Defs:       make(map[*ast.Ident]types.Object),
-			Uses:       make(map[*ast.Ident]types.Object),
-			Implicits:  make(map[ast.Node]types.Object),
-			Instances:  make(map[*ast.Ident]types.Instance),
-			Scopes:     make(map[ast.Node]*types.Scope),
-			Selections: make(map[*ast.SelectorExpr]*types.Selection),
-		},
-		errorFunc: imp.conf.TypeChecker.Error,
-		dir:       dir,
-	}
-	versions.InitFileVersions(&info.Info)
-
-	// Copy the types.Config so we can vary it across PackageInfos.
-	tc := imp.conf.TypeChecker
-	tc.IgnoreFuncBodies = false
-	if f := imp.conf.TypeCheckFuncBodies; f != nil {
-		tc.IgnoreFuncBodies = !f(path)
-	}
-	tc.Importer = closure{imp, info}
-	tc.Error = info.appendError // appendError wraps the user's Error function
-
-	info.checker = types.NewChecker(&tc, imp.conf.fset(), pkg, &info.Info)
-	imp.progMu.Lock()
-	imp.prog.AllPackages[pkg] = info
-	imp.progMu.Unlock()
-	return info
-}
-
-type closure struct {
-	imp  *importer
-	info *PackageInfo
-}
-
-func (c closure) Import(to string) (*types.Package, error) { return c.imp.doImport(c.info, to) }
diff --git a/vendor/golang.org/x/tools/go/loader/util.go b/vendor/golang.org/x/tools/go/loader/util.go
deleted file mode 100644
index 3a80acae6..000000000
--- a/vendor/golang.org/x/tools/go/loader/util.go
+++ /dev/null
@@ -1,123 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package loader
-
-import (
-	"go/ast"
-	"go/build"
-	"go/parser"
-	"go/token"
-	"io"
-	"os"
-	"strconv"
-	"sync"
-
-	"golang.org/x/tools/go/buildutil"
-)
-
-// We use a counting semaphore to limit
-// the number of parallel I/O calls per process.
-var ioLimit = make(chan bool, 10)
-
-// parseFiles parses the Go source files within directory dir and
-// returns the ASTs of the ones that could be at least partially parsed,
-// along with a list of I/O and parse errors encountered.
-//
-// I/O is done via ctxt, which may specify a virtual file system.
-// displayPath is used to transform the filenames attached to the ASTs.
-func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) {
-	if displayPath == nil {
-		displayPath = func(path string) string { return path }
-	}
-	var wg sync.WaitGroup
-	n := len(files)
-	parsed := make([]*ast.File, n)
-	errors := make([]error, n)
-	for i, file := range files {
-		if !buildutil.IsAbsPath(ctxt, file) {
-			file = buildutil.JoinPath(ctxt, dir, file)
-		}
-		wg.Add(1)
-		go func(i int, file string) {
-			ioLimit <- true // wait
-			defer func() {
-				wg.Done()
-				<-ioLimit // signal
-			}()
-			var rd io.ReadCloser
-			var err error
-			if ctxt.OpenFile != nil {
-				rd, err = ctxt.OpenFile(file)
-			} else {
-				rd, err = os.Open(file)
-			}
-			if err != nil {
-				errors[i] = err // open failed
-				return
-			}
-
-			// ParseFile may return both an AST and an error.
-			parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode)
-			rd.Close()
-		}(i, file)
-	}
-	wg.Wait()
-
-	// Eliminate nils, preserving order.
-	var o int
-	for _, f := range parsed {
-		if f != nil {
-			parsed[o] = f
-			o++
-		}
-	}
-	parsed = parsed[:o]
-
-	o = 0
-	for _, err := range errors {
-		if err != nil {
-			errors[o] = err
-			o++
-		}
-	}
-	errors = errors[:o]
-
-	return parsed, errors
-}
-
-// scanImports returns the set of all import paths from all
-// import specs in the specified files.
-func scanImports(files []*ast.File) map[string]bool {
-	imports := make(map[string]bool)
-	for _, f := range files {
-		for _, decl := range f.Decls {
-			if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT {
-				for _, spec := range decl.Specs {
-					spec := spec.(*ast.ImportSpec)
-
-					// NB: do not assume the program is well-formed!
-					path, err := strconv.Unquote(spec.Path.Value)
-					if err != nil {
-						continue // quietly ignore the error
-					}
-					if path == "C" {
-						continue // skip pseudopackage
-					}
-					imports[path] = true
-				}
-			}
-		}
-	}
-	return imports
-}
-
-// ---------- Internal helpers ----------
-
-// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos)
-func tokenFileContainsPos(f *token.File, pos token.Pos) bool {
-	p := int(pos)
-	base := f.Base()
-	return base <= p && p < base+f.Size()
-}
diff --git a/vendor/modules.txt b/vendor/modules.txt
index be8513feb..4972f4e92 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -75,7 +75,7 @@ github.com/KimMachineGun/automemlimit/memlimit
 # github.com/Masterminds/goutils v1.1.1
 ## explicit
 github.com/Masterminds/goutils
-# github.com/Masterminds/semver/v3 v3.2.0
+# github.com/Masterminds/semver/v3 v3.2.1
 ## explicit; go 1.18
 github.com/Masterminds/semver/v3
 # github.com/Masterminds/sprig/v3 v3.2.3
@@ -192,7 +192,7 @@ github.com/dsoprea/go-utility/v2/image
 # github.com/dustin/go-humanize v1.0.1
 ## explicit; go 1.16
 github.com/dustin/go-humanize
-# github.com/felixge/httpsnoop v1.0.3
+# github.com/felixge/httpsnoop v1.0.4
 ## explicit; go 1.13
 github.com/felixge/httpsnoop
 # github.com/fsnotify/fsnotify v1.7.0
@@ -242,8 +242,8 @@ github.com/go-logr/logr/funcr
 # github.com/go-logr/stdr v1.2.2
 ## explicit; go 1.16
 github.com/go-logr/stdr
-# github.com/go-openapi/analysis v0.21.4
-## explicit; go 1.13
+# github.com/go-openapi/analysis v0.23.0
+## explicit; go 1.20
 github.com/go-openapi/analysis
 github.com/go-openapi/analysis/internal/debug
 github.com/go-openapi/analysis/internal/flatten/normalize
@@ -251,25 +251,25 @@ github.com/go-openapi/analysis/internal/flatten/operations
 github.com/go-openapi/analysis/internal/flatten/replace
 github.com/go-openapi/analysis/internal/flatten/schutils
 github.com/go-openapi/analysis/internal/flatten/sortref
-# github.com/go-openapi/errors v0.20.4
-## explicit; go 1.14
+# github.com/go-openapi/errors v0.22.0
+## explicit; go 1.20
 github.com/go-openapi/errors
-# github.com/go-openapi/inflect v0.19.0
-## explicit
+# github.com/go-openapi/inflect v0.21.0
+## explicit; go 1.20
 github.com/go-openapi/inflect
-# github.com/go-openapi/jsonpointer v0.19.6
-## explicit; go 1.13
+# github.com/go-openapi/jsonpointer v0.21.0
+## explicit; go 1.20
 github.com/go-openapi/jsonpointer
-# github.com/go-openapi/jsonreference v0.20.2
-## explicit; go 1.13
+# github.com/go-openapi/jsonreference v0.21.0
+## explicit; go 1.20
 github.com/go-openapi/jsonreference
 github.com/go-openapi/jsonreference/internal
-# github.com/go-openapi/loads v0.21.2
-## explicit; go 1.13
+# github.com/go-openapi/loads v0.22.0
+## explicit; go 1.20
 github.com/go-openapi/loads
 github.com/go-openapi/loads/fmts
-# github.com/go-openapi/runtime v0.26.0
-## explicit; go 1.18
+# github.com/go-openapi/runtime v0.28.0
+## explicit; go 1.20
 github.com/go-openapi/runtime
 github.com/go-openapi/runtime/logger
 github.com/go-openapi/runtime/middleware
@@ -277,17 +277,17 @@ github.com/go-openapi/runtime/middleware/denco
 github.com/go-openapi/runtime/middleware/header
 github.com/go-openapi/runtime/middleware/untyped
 github.com/go-openapi/runtime/security
-# github.com/go-openapi/spec v0.20.9
-## explicit; go 1.13
+# github.com/go-openapi/spec v0.21.0
+## explicit; go 1.20
 github.com/go-openapi/spec
-# github.com/go-openapi/strfmt v0.21.7
-## explicit; go 1.19
+# github.com/go-openapi/strfmt v0.23.0
+## explicit; go 1.20
 github.com/go-openapi/strfmt
-# github.com/go-openapi/swag v0.22.4
-## explicit; go 1.18
+# github.com/go-openapi/swag v0.23.0
+## explicit; go 1.20
 github.com/go-openapi/swag
-# github.com/go-openapi/validate v0.22.1
-## explicit; go 1.14
+# github.com/go-openapi/validate v0.24.0
+## explicit; go 1.20
 github.com/go-openapi/validate
 # github.com/go-playground/form/v4 v4.2.1
 ## explicit; go 1.13
@@ -302,8 +302,8 @@ github.com/go-playground/universal-translator
 # github.com/go-playground/validator/v10 v10.19.0
 ## explicit; go 1.18
 github.com/go-playground/validator/v10
-# github.com/go-swagger/go-swagger v0.30.5
-## explicit; go 1.19
+# github.com/go-swagger/go-swagger v0.30.6-0.20240418033037-c46c303aaa02
+## explicit; go 1.20
 github.com/go-swagger/go-swagger/cmd/swagger
 github.com/go-swagger/go-swagger/cmd/swagger/commands
 github.com/go-swagger/go-swagger/cmd/swagger/commands/diff
@@ -311,7 +311,6 @@ github.com/go-swagger/go-swagger/cmd/swagger/commands/generate
 github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd
 github.com/go-swagger/go-swagger/codescan
 github.com/go-swagger/go-swagger/generator
-github.com/go-swagger/go-swagger/scan
 # github.com/go-xmlfmt/xmlfmt v0.0.0-20211206191508-7fd73a941850
 ## explicit
 github.com/go-xmlfmt/xmlfmt
@@ -351,8 +350,8 @@ github.com/gorilla/css/scanner
 # github.com/gorilla/feeds v1.1.2
 ## explicit; go 1.20
 github.com/gorilla/feeds
-# github.com/gorilla/handlers v1.5.1
-## explicit; go 1.14
+# github.com/gorilla/handlers v1.5.2
+## explicit; go 1.20
 github.com/gorilla/handlers
 # github.com/gorilla/securecookie v1.1.2
 ## explicit; go 1.20
@@ -391,10 +390,10 @@ github.com/hashicorp/hcl/hcl/token
 github.com/hashicorp/hcl/json/parser
 github.com/hashicorp/hcl/json/scanner
 github.com/hashicorp/hcl/json/token
-# github.com/huandu/xstrings v1.3.3
+# github.com/huandu/xstrings v1.4.0
 ## explicit; go 1.12
 github.com/huandu/xstrings
-# github.com/imdario/mergo v0.3.12
+# github.com/imdario/mergo v0.3.16
 ## explicit; go 1.13
 github.com/imdario/mergo
 # github.com/inconshreveable/mousetrap v1.1.0
@@ -562,8 +561,8 @@ github.com/quasoft/memstore
 # github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec
 ## explicit; go 1.12
 github.com/remyoudompheng/bigfft
-# github.com/rogpeppe/go-internal v1.10.0
-## explicit; go 1.19
+# github.com/rogpeppe/go-internal v1.12.0
+## explicit; go 1.20
 github.com/rogpeppe/go-internal/fmtsort
 # github.com/rs/xid v1.5.0
 ## explicit; go 1.12
@@ -574,7 +573,7 @@ github.com/sagikazarmark/locafero
 # github.com/sagikazarmark/slog-shim v0.1.0
 ## explicit; go 1.20
 github.com/sagikazarmark/slog-shim
-# github.com/shopspring/decimal v1.2.0
+# github.com/shopspring/decimal v1.3.1
 ## explicit; go 1.13
 github.com/shopspring/decimal
 # github.com/sirupsen/logrus v1.9.3
@@ -1012,7 +1011,7 @@ golang.org/x/crypto/scrypt
 golang.org/x/crypto/sha3
 golang.org/x/crypto/ssh
 golang.org/x/crypto/ssh/internal/bcrypt_pbkdf
-# golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3
+# golang.org/x/exp v0.0.0-20240222234643-814bf88cf225
 ## explicit; go 1.20
 golang.org/x/exp/constraints
 golang.org/x/exp/slices
@@ -1060,6 +1059,7 @@ golang.org/x/oauth2
 golang.org/x/oauth2/internal
 # golang.org/x/sync v0.6.0
 ## explicit; go 1.18
+golang.org/x/sync/errgroup
 golang.org/x/sync/semaphore
 # golang.org/x/sys v0.19.0
 ## explicit; go 1.18
@@ -1092,11 +1092,8 @@ golang.org/x/text/width
 # golang.org/x/tools v0.19.0
 ## explicit; go 1.19
 golang.org/x/tools/go/ast/astutil
-golang.org/x/tools/go/buildutil
 golang.org/x/tools/go/gcexportdata
-golang.org/x/tools/go/internal/cgo
 golang.org/x/tools/go/internal/packagesdriver
-golang.org/x/tools/go/loader
 golang.org/x/tools/go/packages
 golang.org/x/tools/go/types/objectpath
 golang.org/x/tools/imports