Mirror of https://github.com/containers/skopeo.git (synced 2025-04-28)

Commit bb1ac89327 (parent 03b5bdec24): Add support for Fulcio and Rekor, and --sign-by-sigstore=param-file

skopeo copy and skopeo sync now support --sign-by-sigstore=param-file, using the containers-sigstore-signing-params.yaml(5) file format. That notably adds support for Fulcio and Rekor signing.

Signed-off-by: Miloslav Trmač <mitr@redhat.com>
@@ -13,6 +13,8 @@ import (
    "github.com/containers/image/v5/docker/reference"
    "github.com/containers/image/v5/manifest"
    "github.com/containers/image/v5/pkg/cli"
    "github.com/containers/image/v5/pkg/cli/sigstore"
    "github.com/containers/image/v5/signature/signer"
    "github.com/containers/image/v5/transports"
    "github.com/containers/image/v5/transports/alltransports"
    encconfig "github.com/containers/ocicrypt/config"
@@ -29,6 +31,7 @@ type copyOptions struct {
    additionalTags []string // For docker-archive: destinations, in addition to the name:tag specified as destination, also add these
    removeSignatures bool // Do not copy signatures from the source image
    signByFingerprint string // Sign the image using a GPG key with the specified fingerprint
    signBySigstoreParamFile string // Sign the image using a sigstore signature per configuration in a param file
    signBySigstorePrivateKey string // Sign the image using a sigstore private key
    signPassphraseFile string // Path pointing to a passphrase file when signing (for either signature format, but only one of them)
    signIdentity string // Identity of the signed image, must be a fully specified docker reference
@@ -83,6 +86,7 @@ See skopeo(1) section "IMAGE NAMES" for the expected format
    flags.BoolVar(&opts.preserveDigests, "preserve-digests", false, "Preserve digests of images and lists")
    flags.BoolVar(&opts.removeSignatures, "remove-signatures", false, "Do not copy signatures from SOURCE-IMAGE")
    flags.StringVar(&opts.signByFingerprint, "sign-by", "", "Sign the image using a GPG key with the specified `FINGERPRINT`")
    flags.StringVar(&opts.signBySigstoreParamFile, "sign-by-sigstore", "", "Sign the image using a sigstore parameter file at `PATH`")
    flags.StringVar(&opts.signBySigstorePrivateKey, "sign-by-sigstore-private-key", "", "Sign the image using a sigstore private key at `PATH`")
    flags.StringVar(&opts.signPassphraseFile, "sign-passphrase-file", "", "Read a passphrase for signing an image from `PATH`")
    flags.StringVar(&opts.signIdentity, "sign-identity", "", "Identity of signed image, must be a fully specified docker reference. Defaults to the target docker reference.")
@@ -252,6 +256,22 @@ func (opts *copyOptions) run(args []string, stdout io.Writer) (retErr error) {
        passphrase = p
    } // opts.signByFingerprint triggers a GPG-agent passphrase prompt, possibly using a more secure channel, so we usually shouldn’t prompt ourselves if no passphrase was explicitly provided.

    var signers []*signer.Signer
    if opts.signBySigstoreParamFile != "" {
        signer, err := sigstore.NewSignerFromParameterFile(opts.signBySigstoreParamFile, &sigstore.Options{
            PrivateKeyPassphrasePrompt: func(keyFile string) (string, error) {
                return promptForPassphrase(keyFile, os.Stdin, os.Stdout)
            },
            Stdin:  os.Stdin,
            Stdout: stdout,
        })
        if err != nil {
            return fmt.Errorf("Error using --sign-by-sigstore: %w", err)
        }
        defer signer.Close()
        signers = append(signers, signer)
    }

    var signIdentity reference.Named = nil
    if opts.signIdentity != "" {
        signIdentity, err = reference.ParseNamed(opts.signIdentity)
@@ -265,6 +285,7 @@ func (opts *copyOptions) run(args []string, stdout io.Writer) (retErr error) {
    return retry.IfNecessary(ctx, func() error {
        manifestBytes, err := copy.Image(ctx, policyContext, destRef, srcRef, &copy.Options{
            RemoveSignatures:             opts.removeSignatures,
            Signers:                      signers,
            SignBy:                       opts.signByFingerprint,
            SignPassphrase:               passphrase,
            SignBySigstorePrivateKeyFile: opts.signBySigstorePrivateKey,
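To see the new signing path end to end, here is a minimal sketch of driving the same containers/image API outside of skopeo: build a sigstore signer from a parameter file with `sigstore.NewSignerFromParameterFile` and hand it to `copy.Image` via `copy.Options.Signers`. The package paths, option fields, and the `Close()` pattern are taken from the hunks above; the image references, the `params.yaml` path, the terminal passphrase prompt, and the accept-anything policy are illustrative placeholders only.

```go
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/containers/image/v5/copy"
	"github.com/containers/image/v5/pkg/cli/sigstore"
	"github.com/containers/image/v5/signature"
	"github.com/containers/image/v5/signature/signer"
	"github.com/containers/image/v5/transports/alltransports"
)

func main() {
	ctx := context.Background()

	// Hypothetical source/destination references; any transport supported by
	// containers/image would do.
	srcRef, err := alltransports.ParseImageName("docker://example.com/src:latest")
	if err != nil {
		panic(err)
	}
	destRef, err := alltransports.ParseImageName("docker://example.com/dst:latest")
	if err != nil {
		panic(err)
	}

	// Accept-anything policy purely for illustration; a real caller would load
	// the system policy. Error handling is reduced to panic for brevity.
	policyContext, err := signature.NewPolicyContext(&signature.Policy{
		Default: signature.PolicyRequirements{signature.NewPRInsecureAcceptAnything()},
	})
	if err != nil {
		panic(err)
	}
	defer policyContext.Destroy()

	// Build a sigstore signer from a containers-sigstore-signing-params.yaml(5)
	// file, mirroring the new skopeo code; "params.yaml" is a placeholder path.
	s, err := sigstore.NewSignerFromParameterFile("params.yaml", &sigstore.Options{
		PrivateKeyPassphrasePrompt: func(keyFile string) (string, error) {
			// A real tool would prompt without echoing, as skopeo's
			// promptForPassphrase does; this is only a sketch.
			fmt.Printf("Passphrase for %s: ", keyFile)
			var p string
			_, err := fmt.Scanln(&p)
			return p, err
		},
		Stdin:  os.Stdin,
		Stdout: os.Stdout,
	})
	if err != nil {
		panic(err)
	}
	defer s.Close()

	// The signer rides along via copy.Options.Signers; as in the hunk above it
	// can be combined with the GPG fields (SignBy, SignPassphrase) in the same
	// Options value.
	if _, err := copy.Image(ctx, policyContext, destRef, srcRef, &copy.Options{
		Signers: []*signer.Signer{s},
	}); err != nil {
		panic(err)
	}
}
```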
@@ -19,6 +19,8 @@ import (
    "github.com/containers/image/v5/docker"
    "github.com/containers/image/v5/docker/reference"
    "github.com/containers/image/v5/pkg/cli"
    "github.com/containers/image/v5/pkg/cli/sigstore"
    "github.com/containers/image/v5/signature/signer"
    "github.com/containers/image/v5/transports"
    "github.com/containers/image/v5/types"
    "github.com/opencontainers/go-digest"
@@ -36,6 +38,7 @@ type syncOptions struct {
    retryOpts *retry.Options
    removeSignatures bool // Do not copy signatures from the source image
    signByFingerprint string // Sign the image using a GPG key with the specified fingerprint
    signBySigstoreParamFile string // Sign the image using a sigstore signature per configuration in a param file
    signBySigstorePrivateKey string // Sign the image using a sigstore private key
    signPassphraseFile string // Path pointing to a passphrase file when signing
    format commonFlag.OptionalString // Force conversion of the image to a specified format
@@ -107,6 +110,7 @@ See skopeo-sync(1) for details.
    flags := cmd.Flags()
    flags.BoolVar(&opts.removeSignatures, "remove-signatures", false, "Do not copy signatures from SOURCE images")
    flags.StringVar(&opts.signByFingerprint, "sign-by", "", "Sign the image using a GPG key with the specified `FINGERPRINT`")
    flags.StringVar(&opts.signBySigstoreParamFile, "sign-by-sigstore", "", "Sign the image using a sigstore parameter file at `PATH`")
    flags.StringVar(&opts.signBySigstorePrivateKey, "sign-by-sigstore-private-key", "", "Sign the image using a sigstore private key at `PATH`")
    flags.StringVar(&opts.signPassphraseFile, "sign-passphrase-file", "", "File that contains a passphrase for the --sign-by key")
    flags.VarP(commonFlag.NewOptionalStringValue(&opts.format), "format", "f", `MANIFEST TYPE (oci, v2s1, or v2s2) to use when syncing image(s) to a destination (default is manifest type of source, with fallbacks)`)
@@ -604,8 +608,26 @@ func (opts *syncOptions) run(args []string, stdout io.Writer) (retErr error) {
        }
        passphrase = p
    }

    var signers []*signer.Signer
    if opts.signBySigstoreParamFile != "" {
        signer, err := sigstore.NewSignerFromParameterFile(opts.signBySigstoreParamFile, &sigstore.Options{
            PrivateKeyPassphrasePrompt: func(keyFile string) (string, error) {
                return promptForPassphrase(keyFile, os.Stdin, os.Stdout)
            },
            Stdin:  os.Stdin,
            Stdout: stdout,
        })
        if err != nil {
            return fmt.Errorf("Error using --sign-by-sigstore: %w", err)
        }
        defer signer.Close()
        signers = append(signers, signer)
    }

    options := copy.Options{
        RemoveSignatures:             opts.removeSignatures,
        Signers:                      signers,
        SignBy:                       opts.signByFingerprint,
        SignPassphrase:               passphrase,
        SignBySigstorePrivateKeyFile: opts.signBySigstorePrivateKey,
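The sync path mirrors the copy path: the signer list is built once from the parameter file, and the resulting `copy.Options` value is reused for every image in the sync plan, with each individual copy wrapped in the existing retry helper. A minimal sketch of that reuse pattern follows, assuming the `github.com/containers/common/pkg/retry` import path for the `retry` package referenced above and a hypothetical `imagePair` type standing in for the planner output.

```go
package example

import (
	"context"

	"github.com/containers/common/pkg/retry" // assumed import path of the retry helper used above
	"github.com/containers/image/v5/copy"
	"github.com/containers/image/v5/signature"
	"github.com/containers/image/v5/signature/signer"
	"github.com/containers/image/v5/types"
)

// imagePair is a hypothetical (source, destination) pair produced by the sync planner.
type imagePair struct {
	src, dest types.ImageReference
}

// syncAll reuses one copy.Options value (and therefore one set of signers,
// built once from --sign-by-sigstore) for every image, and wraps each copy in
// retry.IfNecessary as the sync code above does.
func syncAll(ctx context.Context, policyContext *signature.PolicyContext,
	pairs []imagePair, signers []*signer.Signer, retryOpts *retry.Options) error {
	options := copy.Options{
		Signers: signers,
		// SignBy, SignPassphrase, SignBySigstorePrivateKeyFile, etc. would be
		// filled in from the corresponding flags exactly as in the hunk above.
	}
	for _, p := range pairs {
		p := p // capture the loop variable for the closure below
		if err := retry.IfNecessary(ctx, func() error {
			_, err := copy.Image(ctx, policyContext, p.dest, p.src, &options)
			return err
		}, retryOpts); err != nil {
			return err
		}
	}
	return nil
}
```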
@@ -93,6 +93,11 @@ Do not copy signatures, if any, from _source-image_. Necessary when copying a si

Add a “simple signing” signature using that key ID for an image name corresponding to _destination-image_

**--sign-by-sigstore** _param-file_

Add a sigstore signature based on the options in the specified containers sigstore signing parameter file, _param-file_.
See containers-sigstore-signing-params.yaml(5) for details about the file format.

**--sign-by-sigstore-private-key** _path_

Add a sigstore signature using a private key at _path_ for an image name corresponding to _destination-image_
@@ -76,6 +76,11 @@ Print usage statement.

Add a “simple signing” signature using that key ID for an image name corresponding to _destination-image_

**--sign-by-sigstore** _param-file_

Add a sigstore signature based on the options in the specified containers sigstore signing parameter file, _param-file_.
See containers-sigstore-signing-params.yaml(5) for details about the file format.

**--sign-by-sigstore-private-key** _path_

Add a sigstore signature using a private key at _path_ for an image name corresponding to _destination-image_
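To tie the documentation back to the implementation, the flags described above all land in the same `copy.Options` struct in both commands. A small illustrative mapping follows, using only option fields that appear in the diffs; the helper function and its parameter names are hypothetical.

```go
package example

import (
	"github.com/containers/image/v5/copy"
	"github.com/containers/image/v5/signature/signer"
)

// signingOptions maps the documented flags onto the copy.Options fields used
// in the diffs above. The helper and its parameter names are illustrative:
//   --remove-signatures            -> RemoveSignatures
//   --sign-by FINGERPRINT          -> SignBy (plus SignPassphrase from --sign-passphrase-file)
//   --sign-by-sigstore PARAM-FILE  -> Signers (a signer built from the parameter file)
//   --sign-by-sigstore-private-key -> SignBySigstorePrivateKeyFile
func signingOptions(removeSignatures bool, sigstoreSigners []*signer.Signer,
	gpgFingerprint, passphrase, sigstorePrivateKeyPath string) copy.Options {
	return copy.Options{
		RemoveSignatures:             removeSignatures,
		Signers:                      sigstoreSigners,
		SignBy:                       gpgFingerprint,
		SignPassphrase:               passphrase,
		SignBySigstorePrivateKeyFile: sigstorePrivateKeyPath,
	}
}
```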
go.mod

@@ -27,9 +27,11 @@ require (
    github.com/Microsoft/hcsshim v0.9.6 // indirect
    github.com/VividCortex/ewma v1.2.0 // indirect
    github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect
    github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect
    github.com/containerd/cgroups v1.0.4 // indirect
    github.com/containerd/stargz-snapshotter/estargz v0.13.0 // indirect
    github.com/containers/libtrust v0.0.0-20200511145503-9c3a6c22cd9a // indirect
    github.com/coreos/go-oidc/v3 v3.4.0 // indirect
    github.com/cyphar/filepath-securejoin v0.2.3 // indirect
    github.com/davecgh/go-spew v1.1.1 // indirect
    github.com/docker/docker v20.10.22+incompatible // indirect
@@ -38,33 +40,55 @@ require (
    github.com/docker/go-units v0.5.0 // indirect
    github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect
    github.com/ghodss/yaml v1.0.0 // indirect
    github.com/go-openapi/analysis v0.21.4 // indirect
    github.com/go-openapi/errors v0.20.3 // indirect
    github.com/go-openapi/jsonpointer v0.19.5 // indirect
    github.com/go-openapi/jsonreference v0.20.0 // indirect
    github.com/go-openapi/loads v0.21.2 // indirect
    github.com/go-openapi/runtime v0.24.1 // indirect
    github.com/go-openapi/spec v0.20.7 // indirect
    github.com/go-openapi/strfmt v0.21.3 // indirect
    github.com/go-openapi/swag v0.22.3 // indirect
    github.com/go-openapi/validate v0.22.0 // indirect
    github.com/go-playground/locales v0.14.0 // indirect
    github.com/go-playground/universal-translator v0.18.0 // indirect
    github.com/go-playground/validator/v10 v10.11.1 // indirect
    github.com/gogo/protobuf v1.3.2 // indirect
    github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
    github.com/golang/protobuf v1.5.2 // indirect
    github.com/google/go-containerregistry v0.12.1 // indirect
    github.com/google/go-intervals v0.0.2 // indirect
    github.com/google/trillian v1.5.0 // indirect
    github.com/google/uuid v1.3.0 // indirect
    github.com/gorilla/mux v1.8.0 // indirect
    github.com/hashicorp/errwrap v1.1.0 // indirect
    github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
    github.com/hashicorp/go-multierror v1.1.1 // indirect
    github.com/hashicorp/go-retryablehttp v0.7.2 // indirect
    github.com/imdario/mergo v0.3.13 // indirect
    github.com/inconshreveable/mousetrap v1.0.1 // indirect
    github.com/josharian/intern v1.0.0 // indirect
    github.com/json-iterator/go v1.1.12 // indirect
    github.com/klauspost/compress v1.15.14 // indirect
    github.com/klauspost/pgzip v1.2.6-0.20220930104621-17e8dac29df8 // indirect
    github.com/kr/pretty v0.3.0 // indirect
    github.com/kr/text v0.2.0 // indirect
    github.com/leodido/go-urn v1.2.1 // indirect
    github.com/letsencrypt/boulder v0.0.0-20221109233200-85aa52084eaf // indirect
    github.com/mailru/easyjson v0.7.7 // indirect
    github.com/mattn/go-runewidth v0.0.14 // indirect
    github.com/mattn/go-shellwords v1.0.12 // indirect
    github.com/miekg/pkcs11 v1.1.1 // indirect
    github.com/mistifyio/go-zfs/v3 v3.0.0 // indirect
    github.com/mitchellh/mapstructure v1.5.0 // indirect
    github.com/moby/sys/mountinfo v0.6.2 // indirect
    github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
    github.com/modern-go/reflect2 v1.0.2 // indirect
    github.com/oklog/ulid v1.3.1 // indirect
    github.com/opencontainers/runc v1.1.4 // indirect
    github.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417 // indirect
    github.com/opencontainers/selinux v1.10.2 // indirect
    github.com/opentracing/opentracing-go v1.2.0 // indirect
    github.com/ostreedev/ostree-go v0.0.0-20210805093236-719684c64e4f // indirect
    github.com/pkg/errors v0.9.1 // indirect
    github.com/pmezard/go-difflib v1.0.0 // indirect
@@ -72,7 +96,11 @@ require (
    github.com/rivo/uniseg v0.4.3 // indirect
    github.com/rogpeppe/go-internal v1.8.0 // indirect
    github.com/russross/blackfriday v2.0.0+incompatible // indirect
    github.com/segmentio/ksuid v1.0.4 // indirect
    github.com/sigstore/fulcio v1.0.0 // indirect
    github.com/sigstore/rekor v1.0.1 // indirect
    github.com/sigstore/sigstore v1.5.0 // indirect
    github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 // indirect
    github.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980 // indirect
    github.com/sylabs/sif/v2 v2.9.0 // indirect
    github.com/tchap/go-patricia v2.3.0+incompatible // indirect
@@ -85,15 +113,18 @@ require (
    github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
    github.com/xeipuuv/gojsonschema v1.2.0 // indirect
    go.etcd.io/bbolt v1.3.6 // indirect
    go.mongodb.org/mongo-driver v1.11.1 // indirect
    go.mozilla.org/pkcs7 v0.0.0-20210826202110-33d05740a352 // indirect
    go.opencensus.io v0.24.0 // indirect
    golang.org/x/crypto v0.5.0 // indirect
    golang.org/x/mod v0.7.0 // indirect
    golang.org/x/net v0.5.0 // indirect
    golang.org/x/oauth2 v0.4.0 // indirect
    golang.org/x/sync v0.1.0 // indirect
    golang.org/x/sys v0.4.0 // indirect
    golang.org/x/text v0.6.0 // indirect
    golang.org/x/tools v0.4.0 // indirect
    google.golang.org/appengine v1.6.7 // indirect
    google.golang.org/genproto v0.0.0-20221207170731-23e4bf6bdc37 // indirect
    google.golang.org/grpc v1.51.0 // indirect
    google.golang.org/protobuf v1.28.1 // indirect
go.sum

Checksum entries added and updated to match the dependency changes in go.mod above; the new entries cover, among others, github.com/asaskevich/govalidator, github.com/coreos/go-oidc/v3, github.com/sigstore/fulcio v1.0.0, github.com/sigstore/rekor v1.0.1, and the go-openapi and go-playground modules listed in go.mod.
New vendored files under vendor/github.com/asaskevich/govalidator/ (marked generated/vendored by the diff viewer): .gitignore (15 lines), .travis.yml (12 lines), CODE_OF_CONDUCT.md (43 lines), CONTRIBUTING.md (63 lines), LICENSE (21 lines, MIT, Copyright (c) 2014-2020 Alex Saskevich), and README.md (622 lines, shown truncated).
|
||||
func IsLongitude(str string) bool
|
||||
func IsLowerCase(str string) bool
|
||||
func IsMAC(str string) bool
|
||||
func IsMD4(str string) bool
|
||||
func IsMD5(str string) bool
|
||||
func IsMagnetURI(str string) bool
|
||||
func IsMongoID(str string) bool
|
||||
func IsMultibyte(str string) bool
|
||||
func IsNatural(value float64) bool
|
||||
func IsNegative(value float64) bool
|
||||
func IsNonNegative(value float64) bool
|
||||
func IsNonPositive(value float64) bool
|
||||
func IsNotNull(str string) bool
|
||||
func IsNull(str string) bool
|
||||
func IsNumeric(str string) bool
|
||||
func IsPort(str string) bool
|
||||
func IsPositive(value float64) bool
|
||||
func IsPrintableASCII(str string) bool
|
||||
func IsRFC3339(str string) bool
|
||||
func IsRFC3339WithoutZone(str string) bool
|
||||
func IsRGBcolor(str string) bool
|
||||
func IsRegex(str string) bool
|
||||
func IsRequestURI(rawurl string) bool
|
||||
func IsRequestURL(rawurl string) bool
|
||||
func IsRipeMD128(str string) bool
|
||||
func IsRipeMD160(str string) bool
|
||||
func IsRsaPub(str string, params ...string) bool
|
||||
func IsRsaPublicKey(str string, keylen int) bool
|
||||
func IsSHA1(str string) bool
|
||||
func IsSHA256(str string) bool
|
||||
func IsSHA384(str string) bool
|
||||
func IsSHA512(str string) bool
|
||||
func IsSSN(str string) bool
|
||||
func IsSemver(str string) bool
|
||||
func IsTiger128(str string) bool
|
||||
func IsTiger160(str string) bool
|
||||
func IsTiger192(str string) bool
|
||||
func IsTime(str string, format string) bool
|
||||
func IsType(v interface{}, params ...string) bool
|
||||
func IsURL(str string) bool
|
||||
func IsUTFDigit(str string) bool
|
||||
func IsUTFLetter(str string) bool
|
||||
func IsUTFLetterNumeric(str string) bool
|
||||
func IsUTFNumeric(str string) bool
|
||||
func IsUUID(str string) bool
|
||||
func IsUUIDv3(str string) bool
|
||||
func IsUUIDv4(str string) bool
|
||||
func IsUUIDv5(str string) bool
|
||||
func IsULID(str string) bool
|
||||
func IsUnixTime(str string) bool
|
||||
func IsUpperCase(str string) bool
|
||||
func IsVariableWidth(str string) bool
|
||||
func IsWhole(value float64) bool
|
||||
func LeftTrim(str, chars string) string
|
||||
func Map(array []interface{}, iterator ResultIterator) []interface{}
|
||||
func Matches(str, pattern string) bool
|
||||
func MaxStringLength(str string, params ...string) bool
|
||||
func MinStringLength(str string, params ...string) bool
|
||||
func NormalizeEmail(str string) (string, error)
|
||||
func PadBoth(str string, padStr string, padLen int) string
|
||||
func PadLeft(str string, padStr string, padLen int) string
|
||||
func PadRight(str string, padStr string, padLen int) string
|
||||
func PrependPathToErrors(err error, path string) error
|
||||
func Range(str string, params ...string) bool
|
||||
func RemoveTags(s string) string
|
||||
func ReplacePattern(str, pattern, replace string) string
|
||||
func Reverse(s string) string
|
||||
func RightTrim(str, chars string) string
|
||||
func RuneLength(str string, params ...string) bool
|
||||
func SafeFileName(str string) string
|
||||
func SetFieldsRequiredByDefault(value bool)
|
||||
func SetNilPtrAllowedByRequired(value bool)
|
||||
func Sign(value float64) float64
|
||||
func StringLength(str string, params ...string) bool
|
||||
func StringMatches(s string, params ...string) bool
|
||||
func StripLow(str string, keepNewLines bool) string
|
||||
func ToBoolean(str string) (bool, error)
|
||||
func ToFloat(str string) (float64, error)
|
||||
func ToInt(value interface{}) (res int64, err error)
|
||||
func ToJSON(obj interface{}) (string, error)
|
||||
func ToString(obj interface{}) string
|
||||
func Trim(str, chars string) string
|
||||
func Truncate(str string, length int, ending string) string
|
||||
func TruncatingErrorf(str string, args ...interface{}) error
|
||||
func UnderscoreToCamelCase(s string) string
|
||||
func ValidateMap(inputMap map[string]interface{}, validationMap map[string]interface{}) (bool, error)
|
||||
func ValidateStruct(s interface{}) (bool, error)
|
||||
func WhiteList(str, chars string) string
|
||||
type ConditionIterator
|
||||
type CustomTypeValidator
|
||||
type Error
|
||||
func (e Error) Error() string
|
||||
type Errors
|
||||
func (es Errors) Error() string
|
||||
func (es Errors) Errors() []error
|
||||
type ISO3166Entry
|
||||
type ISO693Entry
|
||||
type InterfaceParamValidator
|
||||
type Iterator
|
||||
type ParamValidator
|
||||
type ResultIterator
|
||||
type UnsupportedTypeError
|
||||
func (e *UnsupportedTypeError) Error() string
|
||||
type Validator
|
||||
```
|
||||
|
||||
#### Examples
|
||||
###### IsURL
|
||||
```go
|
||||
println(govalidator.IsURL(`http://user@pass:domain.com/path/page`))
|
||||
```
|
||||
###### IsType
|
||||
```go
|
||||
println(govalidator.IsType("Bob", "string"))
|
||||
println(govalidator.IsType(1, "int"))
|
||||
i := 1
|
||||
println(govalidator.IsType(&i, "*int"))
|
||||
```
|
||||
|
||||
IsType can be used through the tag `type` which is essential for map validation:
|
||||
```go
|
||||
type User struct {
|
||||
Name string `valid:"type(string)"`
|
||||
Age int `valid:"type(int)"`
|
||||
Meta interface{} `valid:"type(string)"`
|
||||
}
|
||||
result, err := govalidator.ValidateStruct(User{"Bob", 20, "meta"})
|
||||
if err != nil {
|
||||
println("error: " + err.Error())
|
||||
}
|
||||
println(result)
|
||||
```
|
||||
###### ToString
|
||||
```go
|
||||
type User struct {
|
||||
FirstName string
|
||||
LastName string
|
||||
}
|
||||
|
||||
str := govalidator.ToString(&User{"John", "Juan"})
|
||||
println(str)
|
||||
```
|
||||
###### Each, Map, Filter, Count for slices
|
||||
Each iterates over the slice/array and calls Iterator for every item
|
||||
```go
|
||||
data := []interface{}{1, 2, 3, 4, 5}
|
||||
var fn govalidator.Iterator = func(value interface{}, index int) {
|
||||
println(value.(int))
|
||||
}
|
||||
govalidator.Each(data, fn)
|
||||
```
|
||||
```go
|
||||
data := []interface{}{1, 2, 3, 4, 5}
|
||||
var fn govalidator.ResultIterator = func(value interface{}, index int) interface{} {
|
||||
return value.(int) * 3
|
||||
}
|
||||
_ = govalidator.Map(data, fn) // result = []interface{}{1, 6, 9, 12, 15}
|
||||
```
|
||||
```go
|
||||
data := []interface{}{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
|
||||
var fn govalidator.ConditionIterator = func(value interface{}, index int) bool {
|
||||
return value.(int)%2 == 0
|
||||
}
|
||||
_ = govalidator.Filter(data, fn) // result = []interface{}{2, 4, 6, 8, 10}
|
||||
_ = govalidator.Count(data, fn) // result = 5
|
||||
```
|
||||
###### ValidateStruct [#2](https://github.com/asaskevich/govalidator/pull/2)
|
||||
If you want to validate structs, you can use tag `valid` for any field in your structure. All validators used with this field in one tag are separated by comma. If you want to skip validation, place `-` in your tag. If you need a validator that is not on the list below, you can add it like this:
|
||||
```go
|
||||
govalidator.TagMap["duck"] = govalidator.Validator(func(str string) bool {
|
||||
return str == "duck"
|
||||
})
|
||||
```
|
||||
For completely custom validators (interface-based), see below.
|
||||
|
||||
Here is a list of available validators for struct fields (validator - used function):
|
||||
```go
|
||||
"email": IsEmail,
|
||||
"url": IsURL,
|
||||
"dialstring": IsDialString,
|
||||
"requrl": IsRequestURL,
|
||||
"requri": IsRequestURI,
|
||||
"alpha": IsAlpha,
|
||||
"utfletter": IsUTFLetter,
|
||||
"alphanum": IsAlphanumeric,
|
||||
"utfletternum": IsUTFLetterNumeric,
|
||||
"numeric": IsNumeric,
|
||||
"utfnumeric": IsUTFNumeric,
|
||||
"utfdigit": IsUTFDigit,
|
||||
"hexadecimal": IsHexadecimal,
|
||||
"hexcolor": IsHexcolor,
|
||||
"rgbcolor": IsRGBcolor,
|
||||
"lowercase": IsLowerCase,
|
||||
"uppercase": IsUpperCase,
|
||||
"int": IsInt,
|
||||
"float": IsFloat,
|
||||
"null": IsNull,
|
||||
"uuid": IsUUID,
|
||||
"uuidv3": IsUUIDv3,
|
||||
"uuidv4": IsUUIDv4,
|
||||
"uuidv5": IsUUIDv5,
|
||||
"creditcard": IsCreditCard,
|
||||
"isbn10": IsISBN10,
|
||||
"isbn13": IsISBN13,
|
||||
"json": IsJSON,
|
||||
"multibyte": IsMultibyte,
|
||||
"ascii": IsASCII,
|
||||
"printableascii": IsPrintableASCII,
|
||||
"fullwidth": IsFullWidth,
|
||||
"halfwidth": IsHalfWidth,
|
||||
"variablewidth": IsVariableWidth,
|
||||
"base64": IsBase64,
|
||||
"datauri": IsDataURI,
|
||||
"ip": IsIP,
|
||||
"port": IsPort,
|
||||
"ipv4": IsIPv4,
|
||||
"ipv6": IsIPv6,
|
||||
"dns": IsDNSName,
|
||||
"host": IsHost,
|
||||
"mac": IsMAC,
|
||||
"latitude": IsLatitude,
|
||||
"longitude": IsLongitude,
|
||||
"ssn": IsSSN,
|
||||
"semver": IsSemver,
|
||||
"rfc3339": IsRFC3339,
|
||||
"rfc3339WithoutZone": IsRFC3339WithoutZone,
|
||||
"ISO3166Alpha2": IsISO3166Alpha2,
|
||||
"ISO3166Alpha3": IsISO3166Alpha3,
|
||||
"ulid": IsULID,
|
||||
```
|
||||
Validators with parameters
|
||||
|
||||
```go
|
||||
"range(min|max)": Range,
|
||||
"length(min|max)": ByteLength,
|
||||
"runelength(min|max)": RuneLength,
|
||||
"stringlength(min|max)": StringLength,
|
||||
"matches(pattern)": StringMatches,
|
||||
"in(string1|string2|...|stringN)": IsIn,
|
||||
"rsapub(keylength)" : IsRsaPub,
|
||||
"minstringlength(int): MinStringLength,
|
||||
"maxstringlength(int): MaxStringLength,
|
||||
```
|
||||
Validators with parameters for any type
|
||||
|
||||
```go
|
||||
"type(type)": IsType,
|
||||
```
|
||||
|
||||
And here is small example of usage:
|
||||
```go
|
||||
type Post struct {
|
||||
Title string `valid:"alphanum,required"`
|
||||
Message string `valid:"duck,ascii"`
|
||||
Message2 string `valid:"animal(dog)"`
|
||||
AuthorIP string `valid:"ipv4"`
|
||||
Date string `valid:"-"`
|
||||
}
|
||||
post := &Post{
|
||||
Title: "My Example Post",
|
||||
Message: "duck",
|
||||
Message2: "dog",
|
||||
AuthorIP: "123.234.54.3",
|
||||
}
|
||||
|
||||
// Add your own struct validation tags
|
||||
govalidator.TagMap["duck"] = govalidator.Validator(func(str string) bool {
|
||||
return str == "duck"
|
||||
})
|
||||
|
||||
// Add your own struct validation tags with parameter
|
||||
govalidator.ParamTagMap["animal"] = govalidator.ParamValidator(func(str string, params ...string) bool {
|
||||
species := params[0]
|
||||
return str == species
|
||||
})
|
||||
govalidator.ParamTagRegexMap["animal"] = regexp.MustCompile("^animal\\((\\w+)\\)$")
|
||||
|
||||
result, err := govalidator.ValidateStruct(post)
|
||||
if err != nil {
|
||||
println("error: " + err.Error())
|
||||
}
|
||||
println(result)
|
||||
```
|
||||
###### ValidateMap [#2](https://github.com/asaskevich/govalidator/pull/338)
|
||||
If you want to validate maps, you can use the map to be validated and a validation map that contain the same tags used in ValidateStruct, both maps have to be in the form `map[string]interface{}`
|
||||
|
||||
So here is small example of usage:
|
||||
```go
|
||||
var mapTemplate = map[string]interface{}{
|
||||
"name":"required,alpha",
|
||||
"family":"required,alpha",
|
||||
"email":"required,email",
|
||||
"cell-phone":"numeric",
|
||||
"address":map[string]interface{}{
|
||||
"line1":"required,alphanum",
|
||||
"line2":"alphanum",
|
||||
"postal-code":"numeric",
|
||||
},
|
||||
}
|
||||
|
||||
var inputMap = map[string]interface{}{
|
||||
"name":"Bob",
|
||||
"family":"Smith",
|
||||
"email":"foo@bar.baz",
|
||||
"address":map[string]interface{}{
|
||||
"line1":"",
|
||||
"line2":"",
|
||||
"postal-code":"",
|
||||
},
|
||||
}
|
||||
|
||||
result, err := govalidator.ValidateMap(inputMap, mapTemplate)
|
||||
if err != nil {
|
||||
println("error: " + err.Error())
|
||||
}
|
||||
println(result)
|
||||
```
|
||||
|
||||
###### WhiteList
|
||||
```go
|
||||
// Remove all characters from string ignoring characters between "a" and "z"
|
||||
println(govalidator.WhiteList("a3a43a5a4a3a2a23a4a5a4a3a4", "a-z") == "aaaaaaaaaaaa")
|
||||
```
|
||||
|
||||
###### Custom validation functions
|
||||
Custom validation using your own domain specific validators is also available - here's an example of how to use it:
|
||||
```go
|
||||
import "github.com/asaskevich/govalidator"
|
||||
|
||||
type CustomByteArray [6]byte // custom types are supported and can be validated
|
||||
|
||||
type StructWithCustomByteArray struct {
|
||||
ID CustomByteArray `valid:"customByteArrayValidator,customMinLengthValidator"` // multiple custom validators are possible as well and will be evaluated in sequence
|
||||
Email string `valid:"email"`
|
||||
CustomMinLength int `valid:"-"`
|
||||
}
|
||||
|
||||
govalidator.CustomTypeTagMap.Set("customByteArrayValidator", func(i interface{}, context interface{}) bool {
|
||||
switch v := context.(type) { // you can type switch on the context interface being validated
|
||||
case StructWithCustomByteArray:
|
||||
// you can check and validate against some other field in the context,
|
||||
// return early or not validate against the context at all – your choice
|
||||
case SomeOtherType:
|
||||
// ...
|
||||
default:
|
||||
// expecting some other type? Throw/panic here or continue
|
||||
}
|
||||
|
||||
switch v := i.(type) { // type switch on the struct field being validated
|
||||
case CustomByteArray:
|
||||
for _, e := range v { // this validator checks that the byte array is not empty, i.e. not all zeroes
|
||||
if e != 0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
govalidator.CustomTypeTagMap.Set("customMinLengthValidator", func(i interface{}, context interface{}) bool {
|
||||
switch v := context.(type) { // this validates a field against the value in another field, i.e. dependent validation
|
||||
case StructWithCustomByteArray:
|
||||
return len(v.ID) >= v.CustomMinLength
|
||||
}
|
||||
return false
|
||||
})
|
||||
```
|
||||
|
||||
###### Loop over Error()
|
||||
By default .Error() returns all errors in a single String. To access each error you can do this:
|
||||
```go
|
||||
if err != nil {
|
||||
errs := err.(govalidator.Errors).Errors()
|
||||
for _, e := range errs {
|
||||
fmt.Println(e.Error())
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
###### Custom error messages
|
||||
Custom error messages are supported via annotations by adding the `~` separator - here's an example of how to use it:
|
||||
```go
|
||||
type Ticket struct {
|
||||
Id int64 `json:"id"`
|
||||
FirstName string `json:"firstname" valid:"required~First name is blank"`
|
||||
}
|
||||
```
|
||||
|
||||
#### Notes
|
||||
Documentation is available here: [godoc.org](https://godoc.org/github.com/asaskevich/govalidator).
|
||||
Full information about code coverage is also available here: [govalidator on gocover.io](http://gocover.io/github.com/asaskevich/govalidator).
|
||||
|
||||
#### Support
|
||||
If you do have a contribution to the package, feel free to create a Pull Request or an Issue.
|
||||
|
||||
#### What to contribute
|
||||
If you don't know what to do, there are some features and functions that need to be done
|
||||
|
||||
- [ ] Refactor code
|
||||
- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar and typo check
|
||||
- [ ] Create actual list of contributors and projects that currently using this package
|
||||
- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues)
|
||||
- [ ] Update actual [list of functions](https://github.com/asaskevich/govalidator#list-of-functions)
|
||||
- [ ] Update [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) that available for `ValidateStruct` and add new
|
||||
- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC` etc
|
||||
- [x] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224)
|
||||
- [ ] Implement fuzzing testing
|
||||
- [ ] Implement some struct/map/array utilities
|
||||
- [ ] Implement map/array validation
|
||||
- [ ] Implement benchmarking
|
||||
- [ ] Implement batch of examples
|
||||
- [ ] Look at forks for new features and fixes
|
||||
|
||||
#### Advice
|
||||
Feel free to create what you want, but keep in mind when you implement new features:
|
||||
- Code must be clear and readable, names of variables/constants clearly describes what they are doing
|
||||
- Public functions must be documented and described in source file and added to README.md to the list of available functions
|
||||
- There are must be unit-tests for any new functions and improvements
|
||||
|
||||
## Credits
|
||||
### Contributors
|
||||
|
||||
This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)].
|
||||
|
||||
#### Special thanks to [contributors](https://github.com/asaskevich/govalidator/graphs/contributors)
|
||||
* [Daniel Lohse](https://github.com/annismckenzie)
|
||||
* [Attila Oláh](https://github.com/attilaolah)
|
||||
* [Daniel Korner](https://github.com/Dadie)
|
||||
* [Steven Wilkin](https://github.com/stevenwilkin)
|
||||
* [Deiwin Sarjas](https://github.com/deiwin)
|
||||
* [Noah Shibley](https://github.com/slugmobile)
|
||||
* [Nathan Davies](https://github.com/nathj07)
|
||||
* [Matt Sanford](https://github.com/mzsanford)
|
||||
* [Simon ccl1115](https://github.com/ccl1115)
|
||||
|
||||
<a href="https://github.com/asaskevich/govalidator/graphs/contributors"><img src="https://opencollective.com/govalidator/contributors.svg?width=890" /></a>
|
||||
|
||||
|
||||
### Backers
|
||||
|
||||
Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/govalidator#backer)]
|
||||
|
||||
<a href="https://opencollective.com/govalidator#backers" target="_blank"><img src="https://opencollective.com/govalidator/backers.svg?width=890"></a>
|
||||
|
||||
|
||||
### Sponsors
|
||||
|
||||
Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [[Become a sponsor](https://opencollective.com/govalidator#sponsor)]
|
||||
|
||||
<a href="https://opencollective.com/govalidator/sponsor/0/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/0/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/govalidator/sponsor/1/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/1/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/govalidator/sponsor/2/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/2/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/govalidator/sponsor/3/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/3/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/govalidator/sponsor/4/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/4/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/govalidator/sponsor/5/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/5/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/govalidator/sponsor/6/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/6/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/govalidator/sponsor/7/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/7/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/govalidator/sponsor/8/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/8/avatar.svg"></a>
|
||||
<a href="https://opencollective.com/govalidator/sponsor/9/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/9/avatar.svg"></a>
|
||||
|
||||
|
||||
|
||||
|
||||
## License
|
||||
[](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_large)
|
87
vendor/github.com/asaskevich/govalidator/arrays.go
generated
vendored
Normal file
87
vendor/github.com/asaskevich/govalidator/arrays.go
generated
vendored
Normal file
@ -0,0 +1,87 @@
|
||||
package govalidator
|
||||
|
||||
// Iterator is the function that accepts element of slice/array and its index
|
||||
type Iterator func(interface{}, int)
|
||||
|
||||
// ResultIterator is the function that accepts element of slice/array and its index and returns any result
|
||||
type ResultIterator func(interface{}, int) interface{}
|
||||
|
||||
// ConditionIterator is the function that accepts element of slice/array and its index and returns boolean
|
||||
type ConditionIterator func(interface{}, int) bool
|
||||
|
||||
// ReduceIterator is the function that accepts two element of slice/array and returns result of merging those values
|
||||
type ReduceIterator func(interface{}, interface{}) interface{}
|
||||
|
||||
// Some validates that any item of array corresponds to ConditionIterator. Returns boolean.
|
||||
func Some(array []interface{}, iterator ConditionIterator) bool {
|
||||
res := false
|
||||
for index, data := range array {
|
||||
res = res || iterator(data, index)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// Every validates that every item of array corresponds to ConditionIterator. Returns boolean.
|
||||
func Every(array []interface{}, iterator ConditionIterator) bool {
|
||||
res := true
|
||||
for index, data := range array {
|
||||
res = res && iterator(data, index)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// Reduce boils down a list of values into a single value by ReduceIterator
|
||||
func Reduce(array []interface{}, iterator ReduceIterator, initialValue interface{}) interface{} {
|
||||
for _, data := range array {
|
||||
initialValue = iterator(initialValue, data)
|
||||
}
|
||||
return initialValue
|
||||
}
|
||||
|
||||
// Each iterates over the slice and apply Iterator to every item
|
||||
func Each(array []interface{}, iterator Iterator) {
|
||||
for index, data := range array {
|
||||
iterator(data, index)
|
||||
}
|
||||
}
|
||||
|
||||
// Map iterates over the slice and apply ResultIterator to every item. Returns new slice as a result.
|
||||
func Map(array []interface{}, iterator ResultIterator) []interface{} {
|
||||
var result = make([]interface{}, len(array))
|
||||
for index, data := range array {
|
||||
result[index] = iterator(data, index)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// Find iterates over the slice and apply ConditionIterator to every item. Returns first item that meet ConditionIterator or nil otherwise.
|
||||
func Find(array []interface{}, iterator ConditionIterator) interface{} {
|
||||
for index, data := range array {
|
||||
if iterator(data, index) {
|
||||
return data
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Filter iterates over the slice and apply ConditionIterator to every item. Returns new slice.
|
||||
func Filter(array []interface{}, iterator ConditionIterator) []interface{} {
|
||||
var result = make([]interface{}, 0)
|
||||
for index, data := range array {
|
||||
if iterator(data, index) {
|
||||
result = append(result, data)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// Count iterates over the slice and apply ConditionIterator to every item. Returns count of items that meets ConditionIterator.
|
||||
func Count(array []interface{}, iterator ConditionIterator) int {
|
||||
count := 0
|
||||
for index, data := range array {
|
||||
if iterator(data, index) {
|
||||
count = count + 1
|
||||
}
|
||||
}
|
||||
return count
|
||||
}
|
81
vendor/github.com/asaskevich/govalidator/converter.go
generated
vendored
Normal file
81
vendor/github.com/asaskevich/govalidator/converter.go
generated
vendored
Normal file
@ -0,0 +1,81 @@
|
||||
package govalidator
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// ToString convert the input to a string.
|
||||
func ToString(obj interface{}) string {
|
||||
res := fmt.Sprintf("%v", obj)
|
||||
return res
|
||||
}
|
||||
|
||||
// ToJSON convert the input to a valid JSON string
|
||||
func ToJSON(obj interface{}) (string, error) {
|
||||
res, err := json.Marshal(obj)
|
||||
if err != nil {
|
||||
res = []byte("")
|
||||
}
|
||||
return string(res), err
|
||||
}
|
||||
|
||||
// ToFloat convert the input string to a float, or 0.0 if the input is not a float.
|
||||
func ToFloat(value interface{}) (res float64, err error) {
|
||||
val := reflect.ValueOf(value)
|
||||
|
||||
switch value.(type) {
|
||||
case int, int8, int16, int32, int64:
|
||||
res = float64(val.Int())
|
||||
case uint, uint8, uint16, uint32, uint64:
|
||||
res = float64(val.Uint())
|
||||
case float32, float64:
|
||||
res = val.Float()
|
||||
case string:
|
||||
res, err = strconv.ParseFloat(val.String(), 64)
|
||||
if err != nil {
|
||||
res = 0
|
||||
}
|
||||
default:
|
||||
err = fmt.Errorf("ToInt: unknown interface type %T", value)
|
||||
res = 0
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// ToInt convert the input string or any int type to an integer type 64, or 0 if the input is not an integer.
|
||||
func ToInt(value interface{}) (res int64, err error) {
|
||||
val := reflect.ValueOf(value)
|
||||
|
||||
switch value.(type) {
|
||||
case int, int8, int16, int32, int64:
|
||||
res = val.Int()
|
||||
case uint, uint8, uint16, uint32, uint64:
|
||||
res = int64(val.Uint())
|
||||
case float32, float64:
|
||||
res = int64(val.Float())
|
||||
case string:
|
||||
if IsInt(val.String()) {
|
||||
res, err = strconv.ParseInt(val.String(), 0, 64)
|
||||
if err != nil {
|
||||
res = 0
|
||||
}
|
||||
} else {
|
||||
err = fmt.Errorf("ToInt: invalid numeric format %g", value)
|
||||
res = 0
|
||||
}
|
||||
default:
|
||||
err = fmt.Errorf("ToInt: unknown interface type %T", value)
|
||||
res = 0
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// ToBoolean convert the input string to a boolean.
|
||||
func ToBoolean(str string) (bool, error) {
|
||||
return strconv.ParseBool(str)
|
||||
}
|
3
vendor/github.com/asaskevich/govalidator/doc.go
generated
vendored
Normal file
3
vendor/github.com/asaskevich/govalidator/doc.go
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
package govalidator
|
||||
|
||||
// A package of validators and sanitizers for strings, structures and collections.
|
47
vendor/github.com/asaskevich/govalidator/error.go
generated
vendored
Normal file
47
vendor/github.com/asaskevich/govalidator/error.go
generated
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
package govalidator
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Errors is an array of multiple errors and conforms to the error interface.
|
||||
type Errors []error
|
||||
|
||||
// Errors returns itself.
|
||||
func (es Errors) Errors() []error {
|
||||
return es
|
||||
}
|
||||
|
||||
func (es Errors) Error() string {
|
||||
var errs []string
|
||||
for _, e := range es {
|
||||
errs = append(errs, e.Error())
|
||||
}
|
||||
sort.Strings(errs)
|
||||
return strings.Join(errs, ";")
|
||||
}
|
||||
|
||||
// Error encapsulates a name, an error and whether there's a custom error message or not.
|
||||
type Error struct {
|
||||
Name string
|
||||
Err error
|
||||
CustomErrorMessageExists bool
|
||||
|
||||
// Validator indicates the name of the validator that failed
|
||||
Validator string
|
||||
Path []string
|
||||
}
|
||||
|
||||
func (e Error) Error() string {
|
||||
if e.CustomErrorMessageExists {
|
||||
return e.Err.Error()
|
||||
}
|
||||
|
||||
errName := e.Name
|
||||
if len(e.Path) > 0 {
|
||||
errName = strings.Join(append(e.Path, e.Name), ".")
|
||||
}
|
||||
|
||||
return errName + ": " + e.Err.Error()
|
||||
}
|
100
vendor/github.com/asaskevich/govalidator/numerics.go
generated
vendored
Normal file
100
vendor/github.com/asaskevich/govalidator/numerics.go
generated
vendored
Normal file
@ -0,0 +1,100 @@
|
||||
package govalidator
|
||||
|
||||
import (
|
||||
"math"
|
||||
)
|
||||
|
||||
// Abs returns absolute value of number
|
||||
func Abs(value float64) float64 {
|
||||
return math.Abs(value)
|
||||
}
|
||||
|
||||
// Sign returns signum of number: 1 in case of value > 0, -1 in case of value < 0, 0 otherwise
|
||||
func Sign(value float64) float64 {
|
||||
if value > 0 {
|
||||
return 1
|
||||
} else if value < 0 {
|
||||
return -1
|
||||
} else {
|
||||
return 0
|
||||
}
|
||||
}
|
||||
|
||||
// IsNegative returns true if value < 0
|
||||
func IsNegative(value float64) bool {
|
||||
return value < 0
|
||||
}
|
||||
|
||||
// IsPositive returns true if value > 0
|
||||
func IsPositive(value float64) bool {
|
||||
return value > 0
|
||||
}
|
||||
|
||||
// IsNonNegative returns true if value >= 0
|
||||
func IsNonNegative(value float64) bool {
|
||||
return value >= 0
|
||||
}
|
||||
|
||||
// IsNonPositive returns true if value <= 0
|
||||
func IsNonPositive(value float64) bool {
|
||||
return value <= 0
|
||||
}
|
||||
|
||||
// InRangeInt returns true if value lies between left and right border
|
||||
func InRangeInt(value, left, right interface{}) bool {
|
||||
value64, _ := ToInt(value)
|
||||
left64, _ := ToInt(left)
|
||||
right64, _ := ToInt(right)
|
||||
if left64 > right64 {
|
||||
left64, right64 = right64, left64
|
||||
}
|
||||
return value64 >= left64 && value64 <= right64
|
||||
}
|
||||
|
||||
// InRangeFloat32 returns true if value lies between left and right border
|
||||
func InRangeFloat32(value, left, right float32) bool {
|
||||
if left > right {
|
||||
left, right = right, left
|
||||
}
|
||||
return value >= left && value <= right
|
||||
}
|
||||
|
||||
// InRangeFloat64 returns true if value lies between left and right border
|
||||
func InRangeFloat64(value, left, right float64) bool {
|
||||
if left > right {
|
||||
left, right = right, left
|
||||
}
|
||||
return value >= left && value <= right
|
||||
}
|
||||
|
||||
// InRange returns true if value lies between left and right border, generic type to handle int, float32, float64 and string.
|
||||
// All types must the same type.
|
||||
// False if value doesn't lie in range or if it incompatible or not comparable
|
||||
func InRange(value interface{}, left interface{}, right interface{}) bool {
|
||||
switch value.(type) {
|
||||
case int:
|
||||
intValue, _ := ToInt(value)
|
||||
intLeft, _ := ToInt(left)
|
||||
intRight, _ := ToInt(right)
|
||||
return InRangeInt(intValue, intLeft, intRight)
|
||||
case float32, float64:
|
||||
intValue, _ := ToFloat(value)
|
||||
intLeft, _ := ToFloat(left)
|
||||
intRight, _ := ToFloat(right)
|
||||
return InRangeFloat64(intValue, intLeft, intRight)
|
||||
case string:
|
||||
return value.(string) >= left.(string) && value.(string) <= right.(string)
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// IsWhole returns true if value is whole number
|
||||
func IsWhole(value float64) bool {
|
||||
return math.Remainder(value, 1) == 0
|
||||
}
|
||||
|
||||
// IsNatural returns true if value is natural number (positive and whole)
|
||||
func IsNatural(value float64) bool {
|
||||
return IsWhole(value) && IsPositive(value)
|
||||
}
|
113
vendor/github.com/asaskevich/govalidator/patterns.go
generated
vendored
Normal file
113
vendor/github.com/asaskevich/govalidator/patterns.go
generated
vendored
Normal file
@ -0,0 +1,113 @@
|
||||
package govalidator
|
||||
|
||||
import "regexp"
|
||||
|
||||
// Basic regular expressions for validating strings
|
||||
const (
|
||||
Email string = "^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$"
|
||||
CreditCard string = "^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|(222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\\d{3})\\d{11}|6[27][0-9]{14})$"
|
||||
ISBN10 string = "^(?:[0-9]{9}X|[0-9]{10})$"
|
||||
ISBN13 string = "^(?:[0-9]{13})$"
|
||||
UUID3 string = "^[0-9a-f]{8}-[0-9a-f]{4}-3[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$"
|
||||
UUID4 string = "^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
|
||||
UUID5 string = "^[0-9a-f]{8}-[0-9a-f]{4}-5[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
|
||||
UUID string = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$"
|
||||
Alpha string = "^[a-zA-Z]+$"
|
||||
Alphanumeric string = "^[a-zA-Z0-9]+$"
|
||||
Numeric string = "^[0-9]+$"
|
||||
Int string = "^(?:[-+]?(?:0|[1-9][0-9]*))$"
|
||||
Float string = "^(?:[-+]?(?:[0-9]+))?(?:\\.[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$"
|
||||
Hexadecimal string = "^[0-9a-fA-F]+$"
|
||||
Hexcolor string = "^#?([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$"
|
||||
RGBcolor string = "^rgb\\(\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*\\)$"
|
||||
ASCII string = "^[\x00-\x7F]+$"
|
||||
Multibyte string = "[^\x00-\x7F]"
|
||||
FullWidth string = "[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]"
|
||||
HalfWidth string = "[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]"
|
||||
Base64 string = "^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=|[A-Za-z0-9+\\/]{4})$"
|
||||
PrintableASCII string = "^[\x20-\x7E]+$"
|
||||
DataURI string = "^data:.+\\/(.+);base64$"
|
||||
MagnetURI string = "^magnet:\\?xt=urn:[a-zA-Z0-9]+:[a-zA-Z0-9]{32,40}&dn=.+&tr=.+$"
|
||||
Latitude string = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$"
|
||||
Longitude string = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$"
|
||||
DNSName string = `^([a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62}){1}(\.[a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62})*[\._]?$`
|
||||
IP string = `(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))`
|
||||
URLSchema string = `((ftp|tcp|udp|wss?|https?):\/\/)`
|
||||
URLUsername string = `(\S+(:\S*)?@)`
|
||||
URLPath string = `((\/|\?|#)[^\s]*)`
|
||||
URLPort string = `(:(\d{1,5}))`
|
||||
URLIP string = `([1-9]\d?|1\d\d|2[01]\d|22[0-3]|24\d|25[0-5])(\.(\d{1,2}|1\d\d|2[0-4]\d|25[0-5])){2}(?:\.([0-9]\d?|1\d\d|2[0-4]\d|25[0-5]))`
|
||||
URLSubdomain string = `((www\.)|([a-zA-Z0-9]+([-_\.]?[a-zA-Z0-9])*[a-zA-Z0-9]\.[a-zA-Z0-9]+))`
|
||||
URL = `^` + URLSchema + `?` + URLUsername + `?` + `((` + URLIP + `|(\[` + IP + `\])|(([a-zA-Z0-9]([a-zA-Z0-9-_]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|(` + URLSubdomain + `?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))\.?` + URLPort + `?` + URLPath + `?$`
|
||||
SSN string = `^\d{3}[- ]?\d{2}[- ]?\d{4}$`
|
||||
WinPath string = `^[a-zA-Z]:\\(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$`
|
||||
UnixPath string = `^(/[^/\x00]*)+/?$`
|
||||
WinARPath string = `^(?:(?:[a-zA-Z]:|\\\\[a-z0-9_.$●-]+\\[a-z0-9_.$●-]+)\\|\\?[^\\/:*?"<>|\r\n]+\\?)(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$`
|
||||
UnixARPath string = `^((\.{0,2}/)?([^/\x00]*))+/?$`
|
||||
Semver string = "^v?(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)(-(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(\\.(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\\+[0-9a-zA-Z-]+(\\.[0-9a-zA-Z-]+)*)?$"
|
||||
tagName string = "valid"
|
||||
hasLowerCase string = ".*[[:lower:]]"
|
||||
hasUpperCase string = ".*[[:upper:]]"
|
||||
hasWhitespace string = ".*[[:space:]]"
|
||||
hasWhitespaceOnly string = "^[[:space:]]+$"
|
||||
IMEI string = "^[0-9a-f]{14}$|^\\d{15}$|^\\d{18}$"
|
||||
IMSI string = "^\\d{14,15}$"
|
||||
E164 string = `^\+?[1-9]\d{1,14}$`
|
||||
)
|
||||
|
||||
// Used by IsFilePath func
|
||||
const (
|
||||
// Unknown is unresolved OS type
|
||||
Unknown = iota
|
||||
// Win is Windows type
|
||||
Win
|
||||
// Unix is *nix OS types
|
||||
Unix
|
||||
)
|
||||
|
||||
var (
|
||||
userRegexp = regexp.MustCompile("^[a-zA-Z0-9!#$%&'*+/=?^_`{|}~.-]+$")
|
||||
hostRegexp = regexp.MustCompile("^[^\\s]+\\.[^\\s]+$")
|
||||
userDotRegexp = regexp.MustCompile("(^[.]{1})|([.]{1}$)|([.]{2,})")
|
||||
rxEmail = regexp.MustCompile(Email)
|
||||
rxCreditCard = regexp.MustCompile(CreditCard)
|
||||
rxISBN10 = regexp.MustCompile(ISBN10)
|
||||
rxISBN13 = regexp.MustCompile(ISBN13)
|
||||
rxUUID3 = regexp.MustCompile(UUID3)
|
||||
rxUUID4 = regexp.MustCompile(UUID4)
|
||||
rxUUID5 = regexp.MustCompile(UUID5)
|
||||
rxUUID = regexp.MustCompile(UUID)
|
||||
rxAlpha = regexp.MustCompile(Alpha)
|
||||
rxAlphanumeric = regexp.MustCompile(Alphanumeric)
|
||||
rxNumeric = regexp.MustCompile(Numeric)
|
||||
rxInt = regexp.MustCompile(Int)
|
||||
rxFloat = regexp.MustCompile(Float)
|
||||
rxHexadecimal = regexp.MustCompile(Hexadecimal)
|
||||
rxHexcolor = regexp.MustCompile(Hexcolor)
|
||||
rxRGBcolor = regexp.MustCompile(RGBcolor)
|
||||
rxASCII = regexp.MustCompile(ASCII)
|
||||
rxPrintableASCII = regexp.MustCompile(PrintableASCII)
|
||||
rxMultibyte = regexp.MustCompile(Multibyte)
|
||||
rxFullWidth = regexp.MustCompile(FullWidth)
|
||||
rxHalfWidth = regexp.MustCompile(HalfWidth)
|
||||
rxBase64 = regexp.MustCompile(Base64)
|
||||
rxDataURI = regexp.MustCompile(DataURI)
|
||||
rxMagnetURI = regexp.MustCompile(MagnetURI)
|
||||
rxLatitude = regexp.MustCompile(Latitude)
|
||||
rxLongitude = regexp.MustCompile(Longitude)
|
||||
rxDNSName = regexp.MustCompile(DNSName)
|
||||
rxURL = regexp.MustCompile(URL)
|
||||
rxSSN = regexp.MustCompile(SSN)
|
||||
rxWinPath = regexp.MustCompile(WinPath)
|
||||
rxUnixPath = regexp.MustCompile(UnixPath)
|
||||
rxARWinPath = regexp.MustCompile(WinARPath)
|
||||
rxARUnixPath = regexp.MustCompile(UnixARPath)
|
||||
rxSemver = regexp.MustCompile(Semver)
|
||||
rxHasLowerCase = regexp.MustCompile(hasLowerCase)
|
||||
rxHasUpperCase = regexp.MustCompile(hasUpperCase)
|
||||
rxHasWhitespace = regexp.MustCompile(hasWhitespace)
|
||||
rxHasWhitespaceOnly = regexp.MustCompile(hasWhitespaceOnly)
|
||||
rxIMEI = regexp.MustCompile(IMEI)
|
||||
rxIMSI = regexp.MustCompile(IMSI)
|
||||
rxE164 = regexp.MustCompile(E164)
|
||||
)
|
656
vendor/github.com/asaskevich/govalidator/types.go
generated
vendored
Normal file
656
vendor/github.com/asaskevich/govalidator/types.go
generated
vendored
Normal file
@ -0,0 +1,656 @@
|
||||
package govalidator
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"regexp"
|
||||
"sort"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// Validator is a wrapper for a validator function that returns bool and accepts string.
|
||||
type Validator func(str string) bool
|
||||
|
||||
// CustomTypeValidator is a wrapper for validator functions that returns bool and accepts any type.
|
||||
// The second parameter should be the context (in the case of validating a struct: the whole object being validated).
|
||||
type CustomTypeValidator func(i interface{}, o interface{}) bool
|
||||
|
||||
// ParamValidator is a wrapper for validator functions that accept additional parameters.
|
||||
type ParamValidator func(str string, params ...string) bool
|
||||
|
||||
// InterfaceParamValidator is a wrapper for functions that accept variants parameters for an interface value
|
||||
type InterfaceParamValidator func(in interface{}, params ...string) bool
|
||||
type tagOptionsMap map[string]tagOption
|
||||
|
||||
func (t tagOptionsMap) orderedKeys() []string {
|
||||
var keys []string
|
||||
for k := range t {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
|
||||
sort.Slice(keys, func(a, b int) bool {
|
||||
return t[keys[a]].order < t[keys[b]].order
|
||||
})
|
||||
|
||||
return keys
|
||||
}
|
||||
|
||||
type tagOption struct {
|
||||
name string
|
||||
customErrorMessage string
|
||||
order int
|
||||
}
|
||||
|
||||
// UnsupportedTypeError is a wrapper for reflect.Type
|
||||
type UnsupportedTypeError struct {
|
||||
Type reflect.Type
|
||||
}
|
||||
|
||||
// stringValues is a slice of reflect.Value holding *reflect.StringValue.
|
||||
// It implements the methods to sort by string.
|
||||
type stringValues []reflect.Value
|
||||
|
||||
// InterfaceParamTagMap is a map of functions accept variants parameters for an interface value
|
||||
var InterfaceParamTagMap = map[string]InterfaceParamValidator{
|
||||
"type": IsType,
|
||||
}
|
||||
|
||||
// InterfaceParamTagRegexMap maps interface param tags to their respective regexes.
|
||||
var InterfaceParamTagRegexMap = map[string]*regexp.Regexp{
|
||||
"type": regexp.MustCompile(`^type\((.*)\)$`),
|
||||
}
|
||||
|
||||
// ParamTagMap is a map of functions accept variants parameters
|
||||
var ParamTagMap = map[string]ParamValidator{
|
||||
"length": ByteLength,
|
||||
"range": Range,
|
||||
"runelength": RuneLength,
|
||||
"stringlength": StringLength,
|
||||
"matches": StringMatches,
|
||||
"in": IsInRaw,
|
||||
"rsapub": IsRsaPub,
|
||||
"minstringlength": MinStringLength,
|
||||
"maxstringlength": MaxStringLength,
|
||||
}
|
||||
|
||||
// ParamTagRegexMap maps param tags to their respective regexes.
|
||||
var ParamTagRegexMap = map[string]*regexp.Regexp{
|
||||
"range": regexp.MustCompile("^range\\((\\d+)\\|(\\d+)\\)$"),
|
||||
"length": regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"),
|
||||
"runelength": regexp.MustCompile("^runelength\\((\\d+)\\|(\\d+)\\)$"),
|
||||
"stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"),
|
||||
"in": regexp.MustCompile(`^in\((.*)\)`),
|
||||
"matches": regexp.MustCompile(`^matches\((.+)\)$`),
|
||||
"rsapub": regexp.MustCompile("^rsapub\\((\\d+)\\)$"),
|
||||
"minstringlength": regexp.MustCompile("^minstringlength\\((\\d+)\\)$"),
|
||||
"maxstringlength": regexp.MustCompile("^maxstringlength\\((\\d+)\\)$"),
|
||||
}
|
||||
|
||||
type customTypeTagMap struct {
|
||||
validators map[string]CustomTypeValidator
|
||||
|
||||
sync.RWMutex
|
||||
}
|
||||
|
||||
func (tm *customTypeTagMap) Get(name string) (CustomTypeValidator, bool) {
|
||||
tm.RLock()
|
||||
defer tm.RUnlock()
|
||||
v, ok := tm.validators[name]
|
||||
return v, ok
|
||||
}
|
||||
|
||||
func (tm *customTypeTagMap) Set(name string, ctv CustomTypeValidator) {
|
||||
tm.Lock()
|
||||
defer tm.Unlock()
|
||||
tm.validators[name] = ctv
|
||||
}
|
||||
|
||||
// CustomTypeTagMap is a map of functions that can be used as tags for ValidateStruct function.
|
||||
// Use this to validate compound or custom types that need to be handled as a whole, e.g.
|
||||
// `type UUID [16]byte` (this would be handled as an array of bytes).
|
||||
var CustomTypeTagMap = &customTypeTagMap{validators: make(map[string]CustomTypeValidator)}
|
||||
|
||||
// TagMap is a map of functions, that can be used as tags for ValidateStruct function.
|
||||
var TagMap = map[string]Validator{
|
||||
"email": IsEmail,
|
||||
"url": IsURL,
|
||||
"dialstring": IsDialString,
|
||||
"requrl": IsRequestURL,
|
||||
"requri": IsRequestURI,
|
||||
"alpha": IsAlpha,
|
||||
"utfletter": IsUTFLetter,
|
||||
"alphanum": IsAlphanumeric,
|
||||
"utfletternum": IsUTFLetterNumeric,
|
||||
"numeric": IsNumeric,
|
||||
"utfnumeric": IsUTFNumeric,
|
||||
"utfdigit": IsUTFDigit,
|
||||
"hexadecimal": IsHexadecimal,
|
||||
"hexcolor": IsHexcolor,
|
||||
"rgbcolor": IsRGBcolor,
|
||||
"lowercase": IsLowerCase,
|
||||
"uppercase": IsUpperCase,
|
||||
"int": IsInt,
|
||||
"float": IsFloat,
|
||||
"null": IsNull,
|
||||
"notnull": IsNotNull,
|
||||
"uuid": IsUUID,
|
||||
"uuidv3": IsUUIDv3,
|
||||
"uuidv4": IsUUIDv4,
|
||||
"uuidv5": IsUUIDv5,
|
||||
"creditcard": IsCreditCard,
|
||||
"isbn10": IsISBN10,
|
||||
"isbn13": IsISBN13,
|
||||
"json": IsJSON,
|
||||
"multibyte": IsMultibyte,
|
||||
"ascii": IsASCII,
|
||||
"printableascii": IsPrintableASCII,
|
||||
"fullwidth": IsFullWidth,
|
||||
"halfwidth": IsHalfWidth,
|
||||
"variablewidth": IsVariableWidth,
|
||||
"base64": IsBase64,
|
||||
"datauri": IsDataURI,
|
||||
"ip": IsIP,
|
||||
"port": IsPort,
|
||||
"ipv4": IsIPv4,
|
||||
"ipv6": IsIPv6,
|
||||
"dns": IsDNSName,
|
||||
"host": IsHost,
|
||||
"mac": IsMAC,
|
||||
"latitude": IsLatitude,
|
||||
"longitude": IsLongitude,
|
||||
"ssn": IsSSN,
|
||||
"semver": IsSemver,
|
||||
"rfc3339": IsRFC3339,
|
||||
"rfc3339WithoutZone": IsRFC3339WithoutZone,
|
||||
"ISO3166Alpha2": IsISO3166Alpha2,
|
||||
"ISO3166Alpha3": IsISO3166Alpha3,
|
||||
"ISO4217": IsISO4217,
|
||||
"IMEI": IsIMEI,
|
||||
"ulid": IsULID,
|
||||
}
|
||||
|
||||
// ISO3166Entry stores country codes
|
||||
type ISO3166Entry struct {
|
||||
EnglishShortName string
|
||||
FrenchShortName string
|
||||
Alpha2Code string
|
||||
Alpha3Code string
|
||||
Numeric string
|
||||
}
|
||||
|
||||
//ISO3166List based on https://www.iso.org/obp/ui/#search/code/ Code Type "Officially Assigned Codes"
|
||||
var ISO3166List = []ISO3166Entry{
|
||||
{"Afghanistan", "Afghanistan (l')", "AF", "AFG", "004"},
|
||||
{"Albania", "Albanie (l')", "AL", "ALB", "008"},
|
||||
{"Antarctica", "Antarctique (l')", "AQ", "ATA", "010"},
|
||||
{"Algeria", "Algérie (l')", "DZ", "DZA", "012"},
|
||||
{"American Samoa", "Samoa américaines (les)", "AS", "ASM", "016"},
|
||||
{"Andorra", "Andorre (l')", "AD", "AND", "020"},
|
||||
{"Angola", "Angola (l')", "AO", "AGO", "024"},
|
||||
{"Antigua and Barbuda", "Antigua-et-Barbuda", "AG", "ATG", "028"},
|
||||
{"Azerbaijan", "Azerbaïdjan (l')", "AZ", "AZE", "031"},
|
||||
{"Argentina", "Argentine (l')", "AR", "ARG", "032"},
|
||||
{"Australia", "Australie (l')", "AU", "AUS", "036"},
|
||||
{"Austria", "Autriche (l')", "AT", "AUT", "040"},
|
||||
{"Bahamas (the)", "Bahamas (les)", "BS", "BHS", "044"},
|
||||
{"Bahrain", "Bahreïn", "BH", "BHR", "048"},
|
||||
{"Bangladesh", "Bangladesh (le)", "BD", "BGD", "050"},
|
||||
{"Armenia", "Arménie (l')", "AM", "ARM", "051"},
|
||||
{"Barbados", "Barbade (la)", "BB", "BRB", "052"},
|
||||
{"Belgium", "Belgique (la)", "BE", "BEL", "056"},
|
||||
{"Bermuda", "Bermudes (les)", "BM", "BMU", "060"},
|
||||
{"Bhutan", "Bhoutan (le)", "BT", "BTN", "064"},
|
||||
{"Bolivia (Plurinational State of)", "Bolivie (État plurinational de)", "BO", "BOL", "068"},
|
||||
{"Bosnia and Herzegovina", "Bosnie-Herzégovine (la)", "BA", "BIH", "070"},
|
||||
{"Botswana", "Botswana (le)", "BW", "BWA", "072"},
|
||||
{"Bouvet Island", "Bouvet (l'Île)", "BV", "BVT", "074"},
|
||||
{"Brazil", "Brésil (le)", "BR", "BRA", "076"},
|
||||
{"Belize", "Belize (le)", "BZ", "BLZ", "084"},
|
||||
{"British Indian Ocean Territory (the)", "Indien (le Territoire britannique de l'océan)", "IO", "IOT", "086"},
|
||||
{"Solomon Islands", "Salomon (Îles)", "SB", "SLB", "090"},
|
||||
{"Virgin Islands (British)", "Vierges britanniques (les Îles)", "VG", "VGB", "092"},
|
||||
{"Brunei Darussalam", "Brunéi Darussalam (le)", "BN", "BRN", "096"},
|
||||
{"Bulgaria", "Bulgarie (la)", "BG", "BGR", "100"},
|
||||
{"Myanmar", "Myanmar (le)", "MM", "MMR", "104"},
|
||||
{"Burundi", "Burundi (le)", "BI", "BDI", "108"},
|
||||
{"Belarus", "Bélarus (le)", "BY", "BLR", "112"},
|
||||
{"Cambodia", "Cambodge (le)", "KH", "KHM", "116"},
|
||||
{"Cameroon", "Cameroun (le)", "CM", "CMR", "120"},
|
||||
{"Canada", "Canada (le)", "CA", "CAN", "124"},
|
||||
{"Cabo Verde", "Cabo Verde", "CV", "CPV", "132"},
|
||||
{"Cayman Islands (the)", "Caïmans (les Îles)", "KY", "CYM", "136"},
|
||||
{"Central African Republic (the)", "République centrafricaine (la)", "CF", "CAF", "140"},
|
||||
{"Sri Lanka", "Sri Lanka", "LK", "LKA", "144"},
|
||||
{"Chad", "Tchad (le)", "TD", "TCD", "148"},
|
||||
{"Chile", "Chili (le)", "CL", "CHL", "152"},
|
||||
{"China", "Chine (la)", "CN", "CHN", "156"},
|
||||
{"Taiwan (Province of China)", "Taïwan (Province de Chine)", "TW", "TWN", "158"},
|
||||
{"Christmas Island", "Christmas (l'Île)", "CX", "CXR", "162"},
|
||||
{"Cocos (Keeling) Islands (the)", "Cocos (les Îles)/ Keeling (les Îles)", "CC", "CCK", "166"},
|
||||
{"Colombia", "Colombie (la)", "CO", "COL", "170"},
|
||||
{"Comoros (the)", "Comores (les)", "KM", "COM", "174"},
|
||||
{"Mayotte", "Mayotte", "YT", "MYT", "175"},
|
||||
{"Congo (the)", "Congo (le)", "CG", "COG", "178"},
|
||||
{"Congo (the Democratic Republic of the)", "Congo (la République démocratique du)", "CD", "COD", "180"},
|
||||
{"Cook Islands (the)", "Cook (les Îles)", "CK", "COK", "184"},
|
||||
{"Costa Rica", "Costa Rica (le)", "CR", "CRI", "188"},
|
||||
{"Croatia", "Croatie (la)", "HR", "HRV", "191"},
|
||||
{"Cuba", "Cuba", "CU", "CUB", "192"},
|
||||
{"Cyprus", "Chypre", "CY", "CYP", "196"},
|
||||
{"Czech Republic (the)", "tchèque (la République)", "CZ", "CZE", "203"},
|
||||
{"Benin", "Bénin (le)", "BJ", "BEN", "204"},
|
||||
{"Denmark", "Danemark (le)", "DK", "DNK", "208"},
|
||||
{"Dominica", "Dominique (la)", "DM", "DMA", "212"},
|
||||
{"Dominican Republic (the)", "dominicaine (la République)", "DO", "DOM", "214"},
|
||||
{"Ecuador", "Équateur (l')", "EC", "ECU", "218"},
|
||||
{"El Salvador", "El Salvador", "SV", "SLV", "222"},
|
||||
{"Equatorial Guinea", "Guinée équatoriale (la)", "GQ", "GNQ", "226"},
|
||||
{"Ethiopia", "Éthiopie (l')", "ET", "ETH", "231"},
|
||||
{"Eritrea", "Érythrée (l')", "ER", "ERI", "232"},
|
||||
{"Estonia", "Estonie (l')", "EE", "EST", "233"},
|
||||
{"Faroe Islands (the)", "Féroé (les Îles)", "FO", "FRO", "234"},
|
||||
{"Falkland Islands (the) [Malvinas]", "Falkland (les Îles)/Malouines (les Îles)", "FK", "FLK", "238"},
|
||||
{"South Georgia and the South Sandwich Islands", "Géorgie du Sud-et-les Îles Sandwich du Sud (la)", "GS", "SGS", "239"},
|
||||
{"Fiji", "Fidji (les)", "FJ", "FJI", "242"},
|
||||
{"Finland", "Finlande (la)", "FI", "FIN", "246"},
|
||||
{"Åland Islands", "Åland(les Îles)", "AX", "ALA", "248"},
|
||||
{"France", "France (la)", "FR", "FRA", "250"},
|
||||
{"French Guiana", "Guyane française (la )", "GF", "GUF", "254"},
|
||||
{"French Polynesia", "Polynésie française (la)", "PF", "PYF", "258"},
|
||||
{"French Southern Territories (the)", "Terres australes françaises (les)", "TF", "ATF", "260"},
|
||||
{"Djibouti", "Djibouti", "DJ", "DJI", "262"},
|
||||
{"Gabon", "Gabon (le)", "GA", "GAB", "266"},
|
||||
{"Georgia", "Géorgie (la)", "GE", "GEO", "268"},
|
||||
{"Gambia (the)", "Gambie (la)", "GM", "GMB", "270"},
|
||||
{"Palestine, State of", "Palestine, État de", "PS", "PSE", "275"},
|
||||
{"Germany", "Allemagne (l')", "DE", "DEU", "276"},
|
||||
{"Ghana", "Ghana (le)", "GH", "GHA", "288"},
|
||||
{"Gibraltar", "Gibraltar", "GI", "GIB", "292"},
|
||||
{"Kiribati", "Kiribati", "KI", "KIR", "296"},
|
||||
{"Greece", "Grèce (la)", "GR", "GRC", "300"},
|
||||
{"Greenland", "Groenland (le)", "GL", "GRL", "304"},
|
||||
{"Grenada", "Grenade (la)", "GD", "GRD", "308"},
|
||||
{"Guadeloupe", "Guadeloupe (la)", "GP", "GLP", "312"},
|
||||
{"Guam", "Guam", "GU", "GUM", "316"},
|
||||
{"Guatemala", "Guatemala (le)", "GT", "GTM", "320"},
|
||||
{"Guinea", "Guinée (la)", "GN", "GIN", "324"},
|
||||
{"Guyana", "Guyana (le)", "GY", "GUY", "328"},
|
||||
{"Haiti", "Haïti", "HT", "HTI", "332"},
|
||||
{"Heard Island and McDonald Islands", "Heard-et-Îles MacDonald (l'Île)", "HM", "HMD", "334"},
|
||||
{"Holy See (the)", "Saint-Siège (le)", "VA", "VAT", "336"},
|
||||
{"Honduras", "Honduras (le)", "HN", "HND", "340"},
|
||||
{"Hong Kong", "Hong Kong", "HK", "HKG", "344"},
|
||||
{"Hungary", "Hongrie (la)", "HU", "HUN", "348"},
|
||||
{"Iceland", "Islande (l')", "IS", "ISL", "352"},
|
||||
{"India", "Inde (l')", "IN", "IND", "356"},
|
||||
{"Indonesia", "Indonésie (l')", "ID", "IDN", "360"},
|
||||
{"Iran (Islamic Republic of)", "Iran (République Islamique d')", "IR", "IRN", "364"},
|
||||
{"Iraq", "Iraq (l')", "IQ", "IRQ", "368"},
|
||||
{"Ireland", "Irlande (l')", "IE", "IRL", "372"},
|
||||
{"Israel", "Israël", "IL", "ISR", "376"},
|
||||
{"Italy", "Italie (l')", "IT", "ITA", "380"},
|
||||
{"Côte d'Ivoire", "Côte d'Ivoire (la)", "CI", "CIV", "384"},
|
||||
{"Jamaica", "Jamaïque (la)", "JM", "JAM", "388"},
|
||||
{"Japan", "Japon (le)", "JP", "JPN", "392"},
|
||||
{"Kazakhstan", "Kazakhstan (le)", "KZ", "KAZ", "398"},
|
||||
{"Jordan", "Jordanie (la)", "JO", "JOR", "400"},
|
||||
{"Kenya", "Kenya (le)", "KE", "KEN", "404"},
|
||||
{"Korea (the Democratic People's Republic of)", "Corée (la République populaire démocratique de)", "KP", "PRK", "408"},
|
||||
{"Korea (the Republic of)", "Corée (la République de)", "KR", "KOR", "410"},
|
||||
{"Kuwait", "Koweït (le)", "KW", "KWT", "414"},
|
||||
{"Kyrgyzstan", "Kirghizistan (le)", "KG", "KGZ", "417"},
|
||||
{"Lao People's Democratic Republic (the)", "Lao, République démocratique populaire", "LA", "LAO", "418"},
|
||||
{"Lebanon", "Liban (le)", "LB", "LBN", "422"},
|
||||
{"Lesotho", "Lesotho (le)", "LS", "LSO", "426"},
|
||||
{"Latvia", "Lettonie (la)", "LV", "LVA", "428"},
|
||||
{"Liberia", "Libéria (le)", "LR", "LBR", "430"},
|
||||
{"Libya", "Libye (la)", "LY", "LBY", "434"},
|
||||
{"Liechtenstein", "Liechtenstein (le)", "LI", "LIE", "438"},
|
||||
{"Lithuania", "Lituanie (la)", "LT", "LTU", "440"},
|
||||
{"Luxembourg", "Luxembourg (le)", "LU", "LUX", "442"},
|
||||
{"Macao", "Macao", "MO", "MAC", "446"},
|
||||
{"Madagascar", "Madagascar", "MG", "MDG", "450"},
|
||||
{"Malawi", "Malawi (le)", "MW", "MWI", "454"},
|
||||
{"Malaysia", "Malaisie (la)", "MY", "MYS", "458"},
|
||||
{"Maldives", "Maldives (les)", "MV", "MDV", "462"},
|
||||
{"Mali", "Mali (le)", "ML", "MLI", "466"},
|
||||
{"Malta", "Malte", "MT", "MLT", "470"},
|
||||
{"Martinique", "Martinique (la)", "MQ", "MTQ", "474"},
|
||||
{"Mauritania", "Mauritanie (la)", "MR", "MRT", "478"},
|
||||
{"Mauritius", "Maurice", "MU", "MUS", "480"},
|
||||
{"Mexico", "Mexique (le)", "MX", "MEX", "484"},
|
||||
{"Monaco", "Monaco", "MC", "MCO", "492"},
|
||||
{"Mongolia", "Mongolie (la)", "MN", "MNG", "496"},
|
||||
{"Moldova (the Republic of)", "Moldova , République de", "MD", "MDA", "498"},
|
||||
{"Montenegro", "Monténégro (le)", "ME", "MNE", "499"},
|
||||
{"Montserrat", "Montserrat", "MS", "MSR", "500"},
|
||||
{"Morocco", "Maroc (le)", "MA", "MAR", "504"},
|
||||
{"Mozambique", "Mozambique (le)", "MZ", "MOZ", "508"},
|
||||
{"Oman", "Oman", "OM", "OMN", "512"},
|
||||
{"Namibia", "Namibie (la)", "NA", "NAM", "516"},
|
||||
{"Nauru", "Nauru", "NR", "NRU", "520"},
|
||||
{"Nepal", "Népal (le)", "NP", "NPL", "524"},
|
||||
{"Netherlands (the)", "Pays-Bas (les)", "NL", "NLD", "528"},
|
||||
{"Curaçao", "Curaçao", "CW", "CUW", "531"},
|
||||
{"Aruba", "Aruba", "AW", "ABW", "533"},
|
||||
{"Sint Maarten (Dutch part)", "Saint-Martin (partie néerlandaise)", "SX", "SXM", "534"},
|
||||
{"Bonaire, Sint Eustatius and Saba", "Bonaire, Saint-Eustache et Saba", "BQ", "BES", "535"},
|
||||
{"New Caledonia", "Nouvelle-Calédonie (la)", "NC", "NCL", "540"},
|
||||
{"Vanuatu", "Vanuatu (le)", "VU", "VUT", "548"},
|
||||
{"New Zealand", "Nouvelle-Zélande (la)", "NZ", "NZL", "554"},
|
||||
{"Nicaragua", "Nicaragua (le)", "NI", "NIC", "558"},
|
||||
{"Niger (the)", "Niger (le)", "NE", "NER", "562"},
|
||||
{"Nigeria", "Nigéria (le)", "NG", "NGA", "566"},
|
||||
{"Niue", "Niue", "NU", "NIU", "570"},
|
||||
{"Norfolk Island", "Norfolk (l'Île)", "NF", "NFK", "574"},
|
||||
{"Norway", "Norvège (la)", "NO", "NOR", "578"},
|
||||
{"Northern Mariana Islands (the)", "Mariannes du Nord (les Îles)", "MP", "MNP", "580"},
|
||||
{"United States Minor Outlying Islands (the)", "Îles mineures éloignées des États-Unis (les)", "UM", "UMI", "581"},
|
||||
{"Micronesia (Federated States of)", "Micronésie (États fédérés de)", "FM", "FSM", "583"},
|
||||
{"Marshall Islands (the)", "Marshall (Îles)", "MH", "MHL", "584"},
|
||||
{"Palau", "Palaos (les)", "PW", "PLW", "585"},
|
||||
{"Pakistan", "Pakistan (le)", "PK", "PAK", "586"},
|
||||
{"Panama", "Panama (le)", "PA", "PAN", "591"},
|
||||
{"Papua New Guinea", "Papouasie-Nouvelle-Guinée (la)", "PG", "PNG", "598"},
|
||||
{"Paraguay", "Paraguay (le)", "PY", "PRY", "600"},
|
||||
{"Peru", "Pérou (le)", "PE", "PER", "604"},
|
||||
{"Philippines (the)", "Philippines (les)", "PH", "PHL", "608"},
|
||||
{"Pitcairn", "Pitcairn", "PN", "PCN", "612"},
|
||||
{"Poland", "Pologne (la)", "PL", "POL", "616"},
|
||||
{"Portugal", "Portugal (le)", "PT", "PRT", "620"},
|
||||
{"Guinea-Bissau", "Guinée-Bissau (la)", "GW", "GNB", "624"},
|
||||
{"Timor-Leste", "Timor-Leste (le)", "TL", "TLS", "626"},
|
||||
{"Puerto Rico", "Porto Rico", "PR", "PRI", "630"},
|
||||
{"Qatar", "Qatar (le)", "QA", "QAT", "634"},
|
||||
{"Réunion", "Réunion (La)", "RE", "REU", "638"},
|
||||
{"Romania", "Roumanie (la)", "RO", "ROU", "642"},
|
||||
{"Russian Federation (the)", "Russie (la Fédération de)", "RU", "RUS", "643"},
|
||||
{"Rwanda", "Rwanda (le)", "RW", "RWA", "646"},
|
||||
{"Saint Barthélemy", "Saint-Barthélemy", "BL", "BLM", "652"},
|
||||
{"Saint Helena, Ascension and Tristan da Cunha", "Sainte-Hélène, Ascension et Tristan da Cunha", "SH", "SHN", "654"},
|
||||
{"Saint Kitts and Nevis", "Saint-Kitts-et-Nevis", "KN", "KNA", "659"},
|
||||
{"Anguilla", "Anguilla", "AI", "AIA", "660"},
|
||||
{"Saint Lucia", "Sainte-Lucie", "LC", "LCA", "662"},
|
||||
{"Saint Martin (French part)", "Saint-Martin (partie française)", "MF", "MAF", "663"},
|
||||
{"Saint Pierre and Miquelon", "Saint-Pierre-et-Miquelon", "PM", "SPM", "666"},
|
||||
{"Saint Vincent and the Grenadines", "Saint-Vincent-et-les Grenadines", "VC", "VCT", "670"},
|
||||
{"San Marino", "Saint-Marin", "SM", "SMR", "674"},
|
||||
{"Sao Tome and Principe", "Sao Tomé-et-Principe", "ST", "STP", "678"},
|
||||
{"Saudi Arabia", "Arabie saoudite (l')", "SA", "SAU", "682"},
|
||||
{"Senegal", "Sénégal (le)", "SN", "SEN", "686"},
|
||||
{"Serbia", "Serbie (la)", "RS", "SRB", "688"},
|
||||
{"Seychelles", "Seychelles (les)", "SC", "SYC", "690"},
|
||||
{"Sierra Leone", "Sierra Leone (la)", "SL", "SLE", "694"},
|
||||
{"Singapore", "Singapour", "SG", "SGP", "702"},
|
||||
{"Slovakia", "Slovaquie (la)", "SK", "SVK", "703"},
|
||||
{"Viet Nam", "Viet Nam (le)", "VN", "VNM", "704"},
|
||||
{"Slovenia", "Slovénie (la)", "SI", "SVN", "705"},
|
||||
{"Somalia", "Somalie (la)", "SO", "SOM", "706"},
|
||||
{"South Africa", "Afrique du Sud (l')", "ZA", "ZAF", "710"},
|
||||
{"Zimbabwe", "Zimbabwe (le)", "ZW", "ZWE", "716"},
|
||||
{"Spain", "Espagne (l')", "ES", "ESP", "724"},
|
||||
{"South Sudan", "Soudan du Sud (le)", "SS", "SSD", "728"},
|
||||
{"Sudan (the)", "Soudan (le)", "SD", "SDN", "729"},
|
||||
{"Western Sahara*", "Sahara occidental (le)*", "EH", "ESH", "732"},
|
||||
{"Suriname", "Suriname (le)", "SR", "SUR", "740"},
|
||||
{"Svalbard and Jan Mayen", "Svalbard et l'Île Jan Mayen (le)", "SJ", "SJM", "744"},
|
||||
{"Swaziland", "Swaziland (le)", "SZ", "SWZ", "748"},
|
||||
{"Sweden", "Suède (la)", "SE", "SWE", "752"},
|
||||
{"Switzerland", "Suisse (la)", "CH", "CHE", "756"},
|
||||
{"Syrian Arab Republic", "République arabe syrienne (la)", "SY", "SYR", "760"},
|
||||
{"Tajikistan", "Tadjikistan (le)", "TJ", "TJK", "762"},
|
||||
{"Thailand", "Thaïlande (la)", "TH", "THA", "764"},
|
||||
{"Togo", "Togo (le)", "TG", "TGO", "768"},
|
||||
{"Tokelau", "Tokelau (les)", "TK", "TKL", "772"},
|
||||
{"Tonga", "Tonga (les)", "TO", "TON", "776"},
|
||||
{"Trinidad and Tobago", "Trinité-et-Tobago (la)", "TT", "TTO", "780"},
|
||||
{"United Arab Emirates (the)", "Émirats arabes unis (les)", "AE", "ARE", "784"},
|
||||
{"Tunisia", "Tunisie (la)", "TN", "TUN", "788"},
|
||||
{"Turkey", "Turquie (la)", "TR", "TUR", "792"},
|
||||
{"Turkmenistan", "Turkménistan (le)", "TM", "TKM", "795"},
|
||||
{"Turks and Caicos Islands (the)", "Turks-et-Caïcos (les Îles)", "TC", "TCA", "796"},
|
||||
{"Tuvalu", "Tuvalu (les)", "TV", "TUV", "798"},
|
||||
{"Uganda", "Ouganda (l')", "UG", "UGA", "800"},
|
||||
{"Ukraine", "Ukraine (l')", "UA", "UKR", "804"},
|
||||
{"Macedonia (the former Yugoslav Republic of)", "Macédoine (l'ex‑République yougoslave de)", "MK", "MKD", "807"},
|
||||
{"Egypt", "Égypte (l')", "EG", "EGY", "818"},
|
||||
{"United Kingdom of Great Britain and Northern Ireland (the)", "Royaume-Uni de Grande-Bretagne et d'Irlande du Nord (le)", "GB", "GBR", "826"},
|
||||
{"Guernsey", "Guernesey", "GG", "GGY", "831"},
|
||||
{"Jersey", "Jersey", "JE", "JEY", "832"},
|
||||
{"Isle of Man", "Île de Man", "IM", "IMN", "833"},
|
||||
{"Tanzania, United Republic of", "Tanzanie, République-Unie de", "TZ", "TZA", "834"},
|
||||
{"United States of America (the)", "États-Unis d'Amérique (les)", "US", "USA", "840"},
|
||||
{"Virgin Islands (U.S.)", "Vierges des États-Unis (les Îles)", "VI", "VIR", "850"},
|
||||
{"Burkina Faso", "Burkina Faso (le)", "BF", "BFA", "854"},
|
||||
{"Uruguay", "Uruguay (l')", "UY", "URY", "858"},
|
||||
{"Uzbekistan", "Ouzbékistan (l')", "UZ", "UZB", "860"},
|
||||
{"Venezuela (Bolivarian Republic of)", "Venezuela (République bolivarienne du)", "VE", "VEN", "862"},
|
||||
{"Wallis and Futuna", "Wallis-et-Futuna", "WF", "WLF", "876"},
|
||||
{"Samoa", "Samoa (le)", "WS", "WSM", "882"},
|
||||
{"Yemen", "Yémen (le)", "YE", "YEM", "887"},
|
||||
{"Zambia", "Zambie (la)", "ZM", "ZMB", "894"},
|
||||
}
|
||||
|
||||
// ISO4217List is the list of ISO currency codes
var ISO4217List = []string{
    "AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN",
    "BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BOV", "BRL", "BSD", "BTN", "BWP", "BYN", "BZD",
    "CAD", "CDF", "CHE", "CHF", "CHW", "CLF", "CLP", "CNY", "COP", "COU", "CRC", "CUC", "CUP", "CVE", "CZK",
    "DJF", "DKK", "DOP", "DZD",
    "EGP", "ERN", "ETB", "EUR",
    "FJD", "FKP",
    "GBP", "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD",
    "HKD", "HNL", "HRK", "HTG", "HUF",
    "IDR", "ILS", "INR", "IQD", "IRR", "ISK",
    "JMD", "JOD", "JPY",
    "KES", "KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT",
    "LAK", "LBP", "LKR", "LRD", "LSL", "LYD",
    "MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MXV", "MYR", "MZN",
    "NAD", "NGN", "NIO", "NOK", "NPR", "NZD",
    "OMR",
    "PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG",
    "QAR",
    "RON", "RSD", "RUB", "RWF",
    "SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SRD", "SSP", "STD", "STN", "SVC", "SYP", "SZL",
    "THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TWD", "TZS",
    "UAH", "UGX", "USD", "USN", "UYI", "UYU", "UYW", "UZS",
    "VEF", "VES", "VND", "VUV",
    "WST",
    "XAF", "XAG", "XAU", "XBA", "XBB", "XBC", "XBD", "XCD", "XDR", "XOF", "XPD", "XPF", "XPT", "XSU", "XTS", "XUA", "XXX",
    "YER",
    "ZAR", "ZMW", "ZWL",
}
|
||||
|
||||
// ISO693Entry stores ISO language codes
|
||||
type ISO693Entry struct {
|
||||
Alpha3bCode string
|
||||
Alpha2Code string
|
||||
English string
|
||||
}
|
||||
|
||||
//ISO693List based on http://data.okfn.org/data/core/language-codes/r/language-codes-3b2.json
|
||||
var ISO693List = []ISO693Entry{
|
||||
{Alpha3bCode: "aar", Alpha2Code: "aa", English: "Afar"},
|
||||
{Alpha3bCode: "abk", Alpha2Code: "ab", English: "Abkhazian"},
|
||||
{Alpha3bCode: "afr", Alpha2Code: "af", English: "Afrikaans"},
|
||||
{Alpha3bCode: "aka", Alpha2Code: "ak", English: "Akan"},
|
||||
{Alpha3bCode: "alb", Alpha2Code: "sq", English: "Albanian"},
|
||||
{Alpha3bCode: "amh", Alpha2Code: "am", English: "Amharic"},
|
||||
{Alpha3bCode: "ara", Alpha2Code: "ar", English: "Arabic"},
|
||||
{Alpha3bCode: "arg", Alpha2Code: "an", English: "Aragonese"},
|
||||
{Alpha3bCode: "arm", Alpha2Code: "hy", English: "Armenian"},
|
||||
{Alpha3bCode: "asm", Alpha2Code: "as", English: "Assamese"},
|
||||
{Alpha3bCode: "ava", Alpha2Code: "av", English: "Avaric"},
|
||||
{Alpha3bCode: "ave", Alpha2Code: "ae", English: "Avestan"},
|
||||
{Alpha3bCode: "aym", Alpha2Code: "ay", English: "Aymara"},
|
||||
{Alpha3bCode: "aze", Alpha2Code: "az", English: "Azerbaijani"},
|
||||
{Alpha3bCode: "bak", Alpha2Code: "ba", English: "Bashkir"},
|
||||
{Alpha3bCode: "bam", Alpha2Code: "bm", English: "Bambara"},
|
||||
{Alpha3bCode: "baq", Alpha2Code: "eu", English: "Basque"},
|
||||
{Alpha3bCode: "bel", Alpha2Code: "be", English: "Belarusian"},
|
||||
{Alpha3bCode: "ben", Alpha2Code: "bn", English: "Bengali"},
|
||||
{Alpha3bCode: "bih", Alpha2Code: "bh", English: "Bihari languages"},
|
||||
{Alpha3bCode: "bis", Alpha2Code: "bi", English: "Bislama"},
|
||||
{Alpha3bCode: "bos", Alpha2Code: "bs", English: "Bosnian"},
|
||||
{Alpha3bCode: "bre", Alpha2Code: "br", English: "Breton"},
|
||||
{Alpha3bCode: "bul", Alpha2Code: "bg", English: "Bulgarian"},
|
||||
{Alpha3bCode: "bur", Alpha2Code: "my", English: "Burmese"},
|
||||
{Alpha3bCode: "cat", Alpha2Code: "ca", English: "Catalan; Valencian"},
|
||||
{Alpha3bCode: "cha", Alpha2Code: "ch", English: "Chamorro"},
|
||||
{Alpha3bCode: "che", Alpha2Code: "ce", English: "Chechen"},
|
||||
{Alpha3bCode: "chi", Alpha2Code: "zh", English: "Chinese"},
|
||||
{Alpha3bCode: "chu", Alpha2Code: "cu", English: "Church Slavic; Old Slavonic; Church Slavonic; Old Bulgarian; Old Church Slavonic"},
|
||||
{Alpha3bCode: "chv", Alpha2Code: "cv", English: "Chuvash"},
|
||||
{Alpha3bCode: "cor", Alpha2Code: "kw", English: "Cornish"},
|
||||
{Alpha3bCode: "cos", Alpha2Code: "co", English: "Corsican"},
|
||||
{Alpha3bCode: "cre", Alpha2Code: "cr", English: "Cree"},
|
||||
{Alpha3bCode: "cze", Alpha2Code: "cs", English: "Czech"},
|
||||
{Alpha3bCode: "dan", Alpha2Code: "da", English: "Danish"},
|
||||
{Alpha3bCode: "div", Alpha2Code: "dv", English: "Divehi; Dhivehi; Maldivian"},
|
||||
{Alpha3bCode: "dut", Alpha2Code: "nl", English: "Dutch; Flemish"},
|
||||
{Alpha3bCode: "dzo", Alpha2Code: "dz", English: "Dzongkha"},
|
||||
{Alpha3bCode: "eng", Alpha2Code: "en", English: "English"},
|
||||
{Alpha3bCode: "epo", Alpha2Code: "eo", English: "Esperanto"},
|
||||
{Alpha3bCode: "est", Alpha2Code: "et", English: "Estonian"},
|
||||
{Alpha3bCode: "ewe", Alpha2Code: "ee", English: "Ewe"},
|
||||
{Alpha3bCode: "fao", Alpha2Code: "fo", English: "Faroese"},
|
||||
{Alpha3bCode: "fij", Alpha2Code: "fj", English: "Fijian"},
|
||||
{Alpha3bCode: "fin", Alpha2Code: "fi", English: "Finnish"},
|
||||
{Alpha3bCode: "fre", Alpha2Code: "fr", English: "French"},
|
||||
{Alpha3bCode: "fry", Alpha2Code: "fy", English: "Western Frisian"},
|
||||
{Alpha3bCode: "ful", Alpha2Code: "ff", English: "Fulah"},
|
||||
{Alpha3bCode: "geo", Alpha2Code: "ka", English: "Georgian"},
|
||||
{Alpha3bCode: "ger", Alpha2Code: "de", English: "German"},
|
||||
{Alpha3bCode: "gla", Alpha2Code: "gd", English: "Gaelic; Scottish Gaelic"},
|
||||
{Alpha3bCode: "gle", Alpha2Code: "ga", English: "Irish"},
|
||||
{Alpha3bCode: "glg", Alpha2Code: "gl", English: "Galician"},
|
||||
{Alpha3bCode: "glv", Alpha2Code: "gv", English: "Manx"},
|
||||
{Alpha3bCode: "gre", Alpha2Code: "el", English: "Greek, Modern (1453-)"},
|
||||
{Alpha3bCode: "grn", Alpha2Code: "gn", English: "Guarani"},
|
||||
{Alpha3bCode: "guj", Alpha2Code: "gu", English: "Gujarati"},
|
||||
{Alpha3bCode: "hat", Alpha2Code: "ht", English: "Haitian; Haitian Creole"},
|
||||
{Alpha3bCode: "hau", Alpha2Code: "ha", English: "Hausa"},
|
||||
{Alpha3bCode: "heb", Alpha2Code: "he", English: "Hebrew"},
|
||||
{Alpha3bCode: "her", Alpha2Code: "hz", English: "Herero"},
|
||||
{Alpha3bCode: "hin", Alpha2Code: "hi", English: "Hindi"},
|
||||
{Alpha3bCode: "hmo", Alpha2Code: "ho", English: "Hiri Motu"},
|
||||
{Alpha3bCode: "hrv", Alpha2Code: "hr", English: "Croatian"},
|
||||
{Alpha3bCode: "hun", Alpha2Code: "hu", English: "Hungarian"},
|
||||
{Alpha3bCode: "ibo", Alpha2Code: "ig", English: "Igbo"},
|
||||
{Alpha3bCode: "ice", Alpha2Code: "is", English: "Icelandic"},
|
||||
{Alpha3bCode: "ido", Alpha2Code: "io", English: "Ido"},
|
||||
{Alpha3bCode: "iii", Alpha2Code: "ii", English: "Sichuan Yi; Nuosu"},
|
||||
{Alpha3bCode: "iku", Alpha2Code: "iu", English: "Inuktitut"},
|
||||
{Alpha3bCode: "ile", Alpha2Code: "ie", English: "Interlingue; Occidental"},
|
||||
{Alpha3bCode: "ina", Alpha2Code: "ia", English: "Interlingua (International Auxiliary Language Association)"},
|
||||
{Alpha3bCode: "ind", Alpha2Code: "id", English: "Indonesian"},
|
||||
{Alpha3bCode: "ipk", Alpha2Code: "ik", English: "Inupiaq"},
|
||||
{Alpha3bCode: "ita", Alpha2Code: "it", English: "Italian"},
|
||||
{Alpha3bCode: "jav", Alpha2Code: "jv", English: "Javanese"},
|
||||
{Alpha3bCode: "jpn", Alpha2Code: "ja", English: "Japanese"},
|
||||
{Alpha3bCode: "kal", Alpha2Code: "kl", English: "Kalaallisut; Greenlandic"},
|
||||
{Alpha3bCode: "kan", Alpha2Code: "kn", English: "Kannada"},
|
||||
{Alpha3bCode: "kas", Alpha2Code: "ks", English: "Kashmiri"},
|
||||
{Alpha3bCode: "kau", Alpha2Code: "kr", English: "Kanuri"},
|
||||
{Alpha3bCode: "kaz", Alpha2Code: "kk", English: "Kazakh"},
|
||||
{Alpha3bCode: "khm", Alpha2Code: "km", English: "Central Khmer"},
|
||||
{Alpha3bCode: "kik", Alpha2Code: "ki", English: "Kikuyu; Gikuyu"},
|
||||
{Alpha3bCode: "kin", Alpha2Code: "rw", English: "Kinyarwanda"},
|
||||
{Alpha3bCode: "kir", Alpha2Code: "ky", English: "Kirghiz; Kyrgyz"},
|
||||
{Alpha3bCode: "kom", Alpha2Code: "kv", English: "Komi"},
|
||||
{Alpha3bCode: "kon", Alpha2Code: "kg", English: "Kongo"},
|
||||
{Alpha3bCode: "kor", Alpha2Code: "ko", English: "Korean"},
|
||||
{Alpha3bCode: "kua", Alpha2Code: "kj", English: "Kuanyama; Kwanyama"},
|
||||
{Alpha3bCode: "kur", Alpha2Code: "ku", English: "Kurdish"},
|
||||
{Alpha3bCode: "lao", Alpha2Code: "lo", English: "Lao"},
|
||||
{Alpha3bCode: "lat", Alpha2Code: "la", English: "Latin"},
|
||||
{Alpha3bCode: "lav", Alpha2Code: "lv", English: "Latvian"},
|
||||
{Alpha3bCode: "lim", Alpha2Code: "li", English: "Limburgan; Limburger; Limburgish"},
|
||||
{Alpha3bCode: "lin", Alpha2Code: "ln", English: "Lingala"},
|
||||
{Alpha3bCode: "lit", Alpha2Code: "lt", English: "Lithuanian"},
|
||||
{Alpha3bCode: "ltz", Alpha2Code: "lb", English: "Luxembourgish; Letzeburgesch"},
|
||||
{Alpha3bCode: "lub", Alpha2Code: "lu", English: "Luba-Katanga"},
|
||||
{Alpha3bCode: "lug", Alpha2Code: "lg", English: "Ganda"},
|
||||
{Alpha3bCode: "mac", Alpha2Code: "mk", English: "Macedonian"},
|
||||
{Alpha3bCode: "mah", Alpha2Code: "mh", English: "Marshallese"},
|
||||
{Alpha3bCode: "mal", Alpha2Code: "ml", English: "Malayalam"},
|
||||
{Alpha3bCode: "mao", Alpha2Code: "mi", English: "Maori"},
|
||||
{Alpha3bCode: "mar", Alpha2Code: "mr", English: "Marathi"},
|
||||
{Alpha3bCode: "may", Alpha2Code: "ms", English: "Malay"},
|
||||
{Alpha3bCode: "mlg", Alpha2Code: "mg", English: "Malagasy"},
|
||||
{Alpha3bCode: "mlt", Alpha2Code: "mt", English: "Maltese"},
|
||||
{Alpha3bCode: "mon", Alpha2Code: "mn", English: "Mongolian"},
|
||||
{Alpha3bCode: "nau", Alpha2Code: "na", English: "Nauru"},
|
||||
{Alpha3bCode: "nav", Alpha2Code: "nv", English: "Navajo; Navaho"},
|
||||
{Alpha3bCode: "nbl", Alpha2Code: "nr", English: "Ndebele, South; South Ndebele"},
|
||||
{Alpha3bCode: "nde", Alpha2Code: "nd", English: "Ndebele, North; North Ndebele"},
|
||||
{Alpha3bCode: "ndo", Alpha2Code: "ng", English: "Ndonga"},
|
||||
{Alpha3bCode: "nep", Alpha2Code: "ne", English: "Nepali"},
|
||||
{Alpha3bCode: "nno", Alpha2Code: "nn", English: "Norwegian Nynorsk; Nynorsk, Norwegian"},
|
||||
{Alpha3bCode: "nob", Alpha2Code: "nb", English: "Bokmål, Norwegian; Norwegian Bokmål"},
|
||||
{Alpha3bCode: "nor", Alpha2Code: "no", English: "Norwegian"},
|
||||
{Alpha3bCode: "nya", Alpha2Code: "ny", English: "Chichewa; Chewa; Nyanja"},
|
||||
{Alpha3bCode: "oci", Alpha2Code: "oc", English: "Occitan (post 1500); Provençal"},
|
||||
{Alpha3bCode: "oji", Alpha2Code: "oj", English: "Ojibwa"},
|
||||
{Alpha3bCode: "ori", Alpha2Code: "or", English: "Oriya"},
|
||||
{Alpha3bCode: "orm", Alpha2Code: "om", English: "Oromo"},
|
||||
{Alpha3bCode: "oss", Alpha2Code: "os", English: "Ossetian; Ossetic"},
|
||||
{Alpha3bCode: "pan", Alpha2Code: "pa", English: "Panjabi; Punjabi"},
|
||||
{Alpha3bCode: "per", Alpha2Code: "fa", English: "Persian"},
|
||||
{Alpha3bCode: "pli", Alpha2Code: "pi", English: "Pali"},
|
||||
{Alpha3bCode: "pol", Alpha2Code: "pl", English: "Polish"},
|
||||
{Alpha3bCode: "por", Alpha2Code: "pt", English: "Portuguese"},
|
||||
{Alpha3bCode: "pus", Alpha2Code: "ps", English: "Pushto; Pashto"},
|
||||
{Alpha3bCode: "que", Alpha2Code: "qu", English: "Quechua"},
|
||||
{Alpha3bCode: "roh", Alpha2Code: "rm", English: "Romansh"},
|
||||
{Alpha3bCode: "rum", Alpha2Code: "ro", English: "Romanian; Moldavian; Moldovan"},
|
||||
{Alpha3bCode: "run", Alpha2Code: "rn", English: "Rundi"},
|
||||
{Alpha3bCode: "rus", Alpha2Code: "ru", English: "Russian"},
|
||||
{Alpha3bCode: "sag", Alpha2Code: "sg", English: "Sango"},
|
||||
{Alpha3bCode: "san", Alpha2Code: "sa", English: "Sanskrit"},
|
||||
{Alpha3bCode: "sin", Alpha2Code: "si", English: "Sinhala; Sinhalese"},
|
||||
{Alpha3bCode: "slo", Alpha2Code: "sk", English: "Slovak"},
|
||||
{Alpha3bCode: "slv", Alpha2Code: "sl", English: "Slovenian"},
|
||||
{Alpha3bCode: "sme", Alpha2Code: "se", English: "Northern Sami"},
|
||||
{Alpha3bCode: "smo", Alpha2Code: "sm", English: "Samoan"},
|
||||
{Alpha3bCode: "sna", Alpha2Code: "sn", English: "Shona"},
|
||||
{Alpha3bCode: "snd", Alpha2Code: "sd", English: "Sindhi"},
|
||||
{Alpha3bCode: "som", Alpha2Code: "so", English: "Somali"},
|
||||
{Alpha3bCode: "sot", Alpha2Code: "st", English: "Sotho, Southern"},
|
||||
{Alpha3bCode: "spa", Alpha2Code: "es", English: "Spanish; Castilian"},
|
||||
{Alpha3bCode: "srd", Alpha2Code: "sc", English: "Sardinian"},
|
||||
{Alpha3bCode: "srp", Alpha2Code: "sr", English: "Serbian"},
|
||||
{Alpha3bCode: "ssw", Alpha2Code: "ss", English: "Swati"},
|
||||
{Alpha3bCode: "sun", Alpha2Code: "su", English: "Sundanese"},
|
||||
{Alpha3bCode: "swa", Alpha2Code: "sw", English: "Swahili"},
|
||||
{Alpha3bCode: "swe", Alpha2Code: "sv", English: "Swedish"},
|
||||
{Alpha3bCode: "tah", Alpha2Code: "ty", English: "Tahitian"},
|
||||
{Alpha3bCode: "tam", Alpha2Code: "ta", English: "Tamil"},
|
||||
{Alpha3bCode: "tat", Alpha2Code: "tt", English: "Tatar"},
|
||||
{Alpha3bCode: "tel", Alpha2Code: "te", English: "Telugu"},
|
||||
{Alpha3bCode: "tgk", Alpha2Code: "tg", English: "Tajik"},
|
||||
{Alpha3bCode: "tgl", Alpha2Code: "tl", English: "Tagalog"},
|
||||
{Alpha3bCode: "tha", Alpha2Code: "th", English: "Thai"},
|
||||
{Alpha3bCode: "tib", Alpha2Code: "bo", English: "Tibetan"},
|
||||
{Alpha3bCode: "tir", Alpha2Code: "ti", English: "Tigrinya"},
|
||||
{Alpha3bCode: "ton", Alpha2Code: "to", English: "Tonga (Tonga Islands)"},
|
||||
{Alpha3bCode: "tsn", Alpha2Code: "tn", English: "Tswana"},
|
||||
{Alpha3bCode: "tso", Alpha2Code: "ts", English: "Tsonga"},
|
||||
{Alpha3bCode: "tuk", Alpha2Code: "tk", English: "Turkmen"},
|
||||
{Alpha3bCode: "tur", Alpha2Code: "tr", English: "Turkish"},
|
||||
{Alpha3bCode: "twi", Alpha2Code: "tw", English: "Twi"},
|
||||
{Alpha3bCode: "uig", Alpha2Code: "ug", English: "Uighur; Uyghur"},
|
||||
{Alpha3bCode: "ukr", Alpha2Code: "uk", English: "Ukrainian"},
|
||||
{Alpha3bCode: "urd", Alpha2Code: "ur", English: "Urdu"},
|
||||
{Alpha3bCode: "uzb", Alpha2Code: "uz", English: "Uzbek"},
|
||||
{Alpha3bCode: "ven", Alpha2Code: "ve", English: "Venda"},
|
||||
{Alpha3bCode: "vie", Alpha2Code: "vi", English: "Vietnamese"},
|
||||
{Alpha3bCode: "vol", Alpha2Code: "vo", English: "Volapük"},
|
||||
{Alpha3bCode: "wel", Alpha2Code: "cy", English: "Welsh"},
|
||||
{Alpha3bCode: "wln", Alpha2Code: "wa", English: "Walloon"},
|
||||
{Alpha3bCode: "wol", Alpha2Code: "wo", English: "Wolof"},
|
||||
{Alpha3bCode: "xho", Alpha2Code: "xh", English: "Xhosa"},
|
||||
{Alpha3bCode: "yid", Alpha2Code: "yi", English: "Yiddish"},
|
||||
{Alpha3bCode: "yor", Alpha2Code: "yo", English: "Yoruba"},
|
||||
{Alpha3bCode: "zha", Alpha2Code: "za", English: "Zhuang; Chuang"},
|
||||
{Alpha3bCode: "zul", Alpha2Code: "zu", English: "Zulu"},
|
||||
}
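
For reference, a minimal sketch of how a caller might consume the exported ISO4217List and ISO693List tables above; the import path matches the vendored package, and the printed values are illustrative:

package main

import (
    "fmt"

    "github.com/asaskevich/govalidator"
)

func main() {
    // Look up the ISO 639 codes for English in the language table above.
    for _, lang := range govalidator.ISO693List {
        if lang.English == "English" {
            fmt.Println(lang.Alpha2Code, lang.Alpha3bCode) // en eng
            break
        }
    }

    // Check whether a currency code appears in the ISO 4217 table above.
    code := "EUR"
    found := false
    for _, c := range govalidator.ISO4217List {
        if c == code {
            found = true
            break
        }
    }
    fmt.Println(code, found) // EUR true
}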
270 vendor/github.com/asaskevich/govalidator/utils.go generated vendored Normal file
@ -0,0 +1,270 @@
package govalidator

import (
    "errors"
    "fmt"
    "html"
    "math"
    "path"
    "regexp"
    "strings"
    "unicode"
    "unicode/utf8"
)

// Contains checks if the string contains the substring.
func Contains(str, substring string) bool {
    return strings.Contains(str, substring)
}

// Matches checks if the string matches the pattern (the pattern is a regular expression).
// In case of error it returns false.
func Matches(str, pattern string) bool {
    match, _ := regexp.MatchString(pattern, str)
    return match
}

// LeftTrim trims characters from the left side of the input.
// If the second argument is empty, it will remove leading spaces.
func LeftTrim(str, chars string) string {
    if chars == "" {
        return strings.TrimLeftFunc(str, unicode.IsSpace)
    }
    r, _ := regexp.Compile("^[" + chars + "]+")
    return r.ReplaceAllString(str, "")
}

// RightTrim trims characters from the right side of the input.
// If the second argument is empty, it will remove trailing spaces.
func RightTrim(str, chars string) string {
    if chars == "" {
        return strings.TrimRightFunc(str, unicode.IsSpace)
    }
    r, _ := regexp.Compile("[" + chars + "]+$")
    return r.ReplaceAllString(str, "")
}

// Trim trims characters from both sides of the input.
// If the second argument is empty, it will remove spaces.
func Trim(str, chars string) string {
    return LeftTrim(RightTrim(str, chars), chars)
}

// WhiteList removes characters that do not appear in the whitelist.
func WhiteList(str, chars string) string {
    pattern := "[^" + chars + "]+"
    r, _ := regexp.Compile(pattern)
    return r.ReplaceAllString(str, "")
}

// BlackList removes characters that appear in the blacklist.
func BlackList(str, chars string) string {
    pattern := "[" + chars + "]+"
    r, _ := regexp.Compile(pattern)
    return r.ReplaceAllString(str, "")
}

// StripLow removes characters with a numerical value < 32 and 127, mostly control characters.
// If keepNewLines is true, newline characters are preserved (\n and \r, hex 0xA and 0xD).
func StripLow(str string, keepNewLines bool) string {
    chars := ""
    if keepNewLines {
        chars = "\x00-\x09\x0B\x0C\x0E-\x1F\x7F"
    } else {
        chars = "\x00-\x1F\x7F"
    }
    return BlackList(str, chars)
}

// ReplacePattern replaces a regular expression pattern in the string.
func ReplacePattern(str, pattern, replace string) string {
    r, _ := regexp.Compile(pattern)
    return r.ReplaceAllString(str, replace)
}

// Escape replaces <, >, & and " with HTML entities.
var Escape = html.EscapeString

func addSegment(inrune, segment []rune) []rune {
    if len(segment) == 0 {
        return inrune
    }
    if len(inrune) != 0 {
        inrune = append(inrune, '_')
    }
    inrune = append(inrune, segment...)
    return inrune
}

// UnderscoreToCamelCase converts from underscore separated form to camel case form.
// Ex.: my_func => MyFunc
func UnderscoreToCamelCase(s string) string {
    return strings.Replace(strings.Title(strings.Replace(strings.ToLower(s), "_", " ", -1)), " ", "", -1)
}

// CamelCaseToUnderscore converts from camel case form to underscore separated form.
// Ex.: MyFunc => my_func
func CamelCaseToUnderscore(str string) string {
    var output []rune
    var segment []rune
    for _, r := range str {

        // do not treat numbers as a separate segment
        if !unicode.IsLower(r) && string(r) != "_" && !unicode.IsNumber(r) {
            output = addSegment(output, segment)
            segment = nil
        }
        segment = append(segment, unicode.ToLower(r))
    }
    output = addSegment(output, segment)
    return string(output)
}

// Reverse returns the reversed string.
func Reverse(s string) string {
    r := []rune(s)
    for i, j := 0, len(r)-1; i < j; i, j = i+1, j-1 {
        r[i], r[j] = r[j], r[i]
    }
    return string(r)
}

// GetLines splits the string by "\n" and returns an array of lines.
func GetLines(s string) []string {
    return strings.Split(s, "\n")
}

// GetLine returns the specified line of a multiline string.
func GetLine(s string, index int) (string, error) {
    lines := GetLines(s)
    if index < 0 || index >= len(lines) {
        return "", errors.New("line index out of bounds")
    }
    return lines[index], nil
}

// RemoveTags removes all tags from an HTML string.
func RemoveTags(s string) string {
    return ReplacePattern(s, "<[^>]*>", "")
}

// SafeFileName returns a safe string that can be used in file names.
func SafeFileName(str string) string {
    name := strings.ToLower(str)
    name = path.Clean(path.Base(name))
    name = strings.Trim(name, " ")
    separators, err := regexp.Compile(`[ &_=+:]`)
    if err == nil {
        name = separators.ReplaceAllString(name, "-")
    }
    legal, err := regexp.Compile(`[^[:alnum:]-.]`)
    if err == nil {
        name = legal.ReplaceAllString(name, "")
    }
    for strings.Contains(name, "--") {
        name = strings.Replace(name, "--", "-", -1)
    }
    return name
}

// NormalizeEmail canonicalizes an email address.
// The local part of the email address is lowercased for all domains; the hostname is always lowercased and
// the local part of the email address is always lowercased for hosts that are known to be case-insensitive (currently only GMail).
// Normalization follows special rules for known providers: currently, GMail addresses have dots removed in the local part and
// are stripped of tags (e.g. some.one+tag@gmail.com becomes someone@gmail.com) and all @googlemail.com addresses are
// normalized to @gmail.com.
func NormalizeEmail(str string) (string, error) {
    if !IsEmail(str) {
        return "", fmt.Errorf("%s is not an email", str)
    }
    parts := strings.Split(str, "@")
    parts[0] = strings.ToLower(parts[0])
    parts[1] = strings.ToLower(parts[1])
    if parts[1] == "gmail.com" || parts[1] == "googlemail.com" {
        parts[1] = "gmail.com"
        parts[0] = strings.Split(ReplacePattern(parts[0], `\.`, ""), "+")[0]
    }
    return strings.Join(parts, "@"), nil
}

// Truncate truncates a string to the closest length without breaking words.
func Truncate(str string, length int, ending string) string {
    var aftstr, befstr string
    if len(str) > length {
        words := strings.Fields(str)
        before, present := 0, 0
        for i := range words {
            befstr = aftstr
            before = present
            aftstr = aftstr + words[i] + " "
            present = len(aftstr)
            if present > length && i != 0 {
                if (length - before) < (present - length) {
                    return Trim(befstr, " /\\.,\"'#!?&@+-") + ending
                }
                return Trim(aftstr, " /\\.,\"'#!?&@+-") + ending
            }
        }
    }

    return str
}

// PadLeft pads the left side of a string if the size of the string is less than the indicated pad length.
func PadLeft(str string, padStr string, padLen int) string {
    return buildPadStr(str, padStr, padLen, true, false)
}

// PadRight pads the right side of a string if the size of the string is less than the indicated pad length.
func PadRight(str string, padStr string, padLen int) string {
    return buildPadStr(str, padStr, padLen, false, true)
}

// PadBoth pads both sides of a string if the size of the string is less than the indicated pad length.
func PadBoth(str string, padStr string, padLen int) string {
    return buildPadStr(str, padStr, padLen, true, true)
}

// buildPadStr pads the string on the left, the right, or both sides.
// Note that the padding string can be unicode and more than one character.
func buildPadStr(str string, padStr string, padLen int, padLeft bool, padRight bool) string {

    // When the padded length is less than the current string size
    if padLen < utf8.RuneCountInString(str) {
        return str
    }

    padLen -= utf8.RuneCountInString(str)

    targetLen := padLen

    targetLenLeft := targetLen
    targetLenRight := targetLen
    if padLeft && padRight {
        targetLenLeft = padLen / 2
        targetLenRight = padLen - targetLenLeft
    }

    strToRepeatLen := utf8.RuneCountInString(padStr)

    repeatTimes := int(math.Ceil(float64(targetLen) / float64(strToRepeatLen)))
    repeatedString := strings.Repeat(padStr, repeatTimes)

    leftSide := ""
    if padLeft {
        leftSide = repeatedString[0:targetLenLeft]
    }

    rightSide := ""
    if padRight {
        rightSide = repeatedString[0:targetLenRight]
    }

    return leftSide + str + rightSide
}

// TruncatingErrorf removes extra args from fmt.Errorf if they are not formatted in the str object.
func TruncatingErrorf(str string, args ...interface{}) error {
    n := strings.Count(str, "%s")
    return fmt.Errorf(str, args[:n]...)
}
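
A short usage sketch of the string helpers above (expected results shown in comments):

package main

import (
    "fmt"

    "github.com/asaskevich/govalidator"
)

func main() {
    fmt.Println(govalidator.Trim("  hello  ", ""))            // "hello"
    fmt.Println(govalidator.WhiteList("a1b2c3", "0-9"))       // "123"
    fmt.Println(govalidator.CamelCaseToUnderscore("MyFunc"))  // "my_func"
    fmt.Println(govalidator.UnderscoreToCamelCase("my_func")) // "MyFunc"
    fmt.Println(govalidator.PadLeft("abc", "*", 5))           // "**abc"
    fmt.Println(govalidator.Reverse("golang"))                // "gnalog"
}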
1769 vendor/github.com/asaskevich/govalidator/validator.go generated vendored Normal file
(File diff suppressed because it is too large.)

15 vendor/github.com/asaskevich/govalidator/wercker.yml generated vendored Normal file
@ -0,0 +1,15 @@
box: golang
build:
  steps:
    - setup-go-workspace

    - script:
        name: go get
        code: |
          go version
          go get -t ./...

    - script:
        name: go test
        code: |
          go test -race -v ./...
75 vendor/github.com/containers/image/v5/pkg/cli/sigstore/params/sigstore.go generated vendored Normal file
@ -0,0 +1,75 @@
package params

import (
    "bytes"
    "fmt"
    "os"

    "gopkg.in/yaml.v3"
)

// SigningParameterFile collects parameters used for creating sigstore signatures.
//
// To consume such a file, most callers should use c/image/pkg/cli/sigstore instead
// of dealing with this type explicitly using ParseFile.
//
// This type is exported primarily to allow creating parameter files programmatically
// (and eventually this subpackage should provide an API to convert this type into
// the appropriate file contents, so that callers don’t need to do that manually).
type SigningParameterFile struct {
    // Keep this in sync with docs/containers-sigstore-signing-params.yaml.5.md !

    PrivateKeyFile           string `yaml:"privateKeyFile,omitempty"`           // If set, sign using a private key stored in this file.
    PrivateKeyPassphraseFile string `yaml:"privateKeyPassphraseFile,omitempty"` // A file that contains the passphrase required for PrivateKeyFile.

    Fulcio *SigningParameterFileFulcio `yaml:"fulcio,omitempty"` // If set, sign using a short-lived key and a Fulcio-issued certificate.

    RekorURL string `yaml:"rekorURL,omitempty"` // If set, upload the signature to the specified Rekor server, and include a log inclusion proof in the signature.
}

// SigningParameterFileFulcio is a subset of SigningParameterFile dedicated to Fulcio parameters.
type SigningParameterFileFulcio struct {
    // Keep this in sync with docs/containers-sigstore-signing-params.yaml.5.md !

    FulcioURL string `yaml:"fulcioURL,omitempty"` // URL of the Fulcio server. Required.

    // How to obtain the OIDC ID token required by Fulcio. Required.
    OIDCMode OIDCMode `yaml:"oidcMode,omitempty"`

    // oidcMode = staticToken
    OIDCIDToken string `yaml:"oidcIDToken,omitempty"`

    // oidcMode = deviceGrant || interactive
    OIDCIssuerURL    string `yaml:"oidcIssuerURL,omitempty"`
    OIDCClientID     string `yaml:"oidcClientID,omitempty"`
    OIDCClientSecret string `yaml:"oidcClientSecret,omitempty"`
}

type OIDCMode string

const (
    // OIDCModeStaticToken means the parameter file contains a user-provided OIDC ID token value.
    OIDCModeStaticToken OIDCMode = "staticToken"
    // OIDCModeDeviceGrant specifies the OIDC ID token should be obtained using a device authorization grant (RFC 8628).
    OIDCModeDeviceGrant OIDCMode = "deviceGrant"
    // OIDCModeInteractive specifies the OIDC ID token should be obtained interactively (automatically opening a browser,
    // or interactively prompting the user).
    OIDCModeInteractive OIDCMode = "interactive"
)

// ParseFile parses a SigningParameterFile at the specified path.
//
// Most consumers of the parameter file should use c/image/pkg/cli/sigstore to obtain a *signer.Signer instead.
func ParseFile(path string) (*SigningParameterFile, error) {
    var res SigningParameterFile
    source, err := os.ReadFile(path)
    if err != nil {
        return nil, fmt.Errorf("reading %q: %w", path, err)
    }
    dec := yaml.NewDecoder(bytes.NewReader(source))
    dec.KnownFields(true)
    if err = dec.Decode(&res); err != nil {
        return nil, fmt.Errorf("parsing %q: %w", path, err)
    }
    return &res, nil
}
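
For illustration, a minimal sketch of creating such a parameter file programmatically (as the doc comment above suggests) and reading it back with ParseFile; the file name, key paths, and Rekor URL are hypothetical placeholders, and writing the YAML by hand per containers-sigstore-signing-params.yaml(5) works just as well:

package main

import (
    "fmt"
    "os"

    "github.com/containers/image/v5/pkg/cli/sigstore/params"
    "gopkg.in/yaml.v3"
)

func main() {
    // Describe key-based sigstore signing with an upload to Rekor (hypothetical paths/URL).
    p := params.SigningParameterFile{
        PrivateKeyFile:           "/path/to/key.private",
        PrivateKeyPassphraseFile: "/path/to/key.passphrase",
        RekorURL:                 "https://rekor.example.com",
    }
    data, err := yaml.Marshal(&p)
    if err != nil {
        panic(err)
    }
    if err := os.WriteFile("sigstore-params.yaml", data, 0o600); err != nil {
        panic(err)
    }

    // Round-trip through the same parser used by NewSignerFromParameterFile below.
    parsed, err := params.ParseFile("sigstore-params.yaml")
    if err != nil {
        panic(err)
    }
    fmt.Println(parsed.RekorURL)
}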
117 vendor/github.com/containers/image/v5/pkg/cli/sigstore/sigstore.go generated vendored Normal file
@ -0,0 +1,117 @@
package sigstore

import (
    "errors"
    "fmt"
    "io"
    "net/url"

    "github.com/containers/image/v5/pkg/cli"
    "github.com/containers/image/v5/pkg/cli/sigstore/params"
    "github.com/containers/image/v5/signature/signer"
    "github.com/containers/image/v5/signature/sigstore"
    "github.com/containers/image/v5/signature/sigstore/fulcio"
    "github.com/containers/image/v5/signature/sigstore/rekor"
)

// Options collects data that the caller should provide to NewSignerFromParameterFile.
// The caller should set all fields unless documented otherwise.
type Options struct {
    PrivateKeyPassphrasePrompt func(keyFile string) (string, error) // A function to call to interactively prompt for a passphrase
    Stdin                      io.Reader
    Stdout                     io.Writer
}

// NewSignerFromParameterFile returns a signature.Signer which creates sigstore signatures based on a parameter file at the specified path.
//
// The caller must call Close() on the returned Signer.
func NewSignerFromParameterFile(path string, options *Options) (*signer.Signer, error) {
    params, err := params.ParseFile(path)
    if err != nil {
        return nil, fmt.Errorf("setting up signing using parameter file %q: %w", path, err)
    }
    return newSignerFromParameterData(params, options)
}

// newSignerFromParameterData returns a signature.Signer which creates sigstore signatures based on parameter file contents.
//
// The caller must call Close() on the returned Signer.
func newSignerFromParameterData(params *params.SigningParameterFile, options *Options) (*signer.Signer, error) {
    opts := []sigstore.Option{}
    if params.PrivateKeyFile != "" {
        var getPassphrase func(keyFile string) (string, error)
        switch {
        case params.PrivateKeyPassphraseFile != "":
            getPassphrase = func(_ string) (string, error) {
                return cli.ReadPassphraseFile(params.PrivateKeyPassphraseFile)
            }
        case options.PrivateKeyPassphrasePrompt != nil:
            getPassphrase = options.PrivateKeyPassphrasePrompt
        default: // This shouldn’t happen; the caller is expected to set options.PrivateKeyPassphrasePrompt
            return nil, fmt.Errorf("private key %s specified, but no way to get a passphrase", params.PrivateKeyFile)
        }
        passphrase, err := getPassphrase(params.PrivateKeyFile)
        if err != nil {
            return nil, err
        }
        opts = append(opts, sigstore.WithPrivateKeyFile(params.PrivateKeyFile, []byte(passphrase)))
    }

    if params.Fulcio != nil {
        fulcioOpt, err := fulcioOption(params.Fulcio, options)
        if err != nil {
            return nil, err
        }
        opts = append(opts, fulcioOpt)
    }

    if params.RekorURL != "" {
        rekorURL, err := url.Parse(params.RekorURL)
        if err != nil {
            return nil, fmt.Errorf("parsing rekorURL %q: %w", params.RekorURL, err)
        }
        opts = append(opts, rekor.WithRekor(rekorURL))
    }

    return sigstore.NewSigner(opts...)
}

// fulcioOption returns a sigstore.Option for Fulcio use based on f.
func fulcioOption(f *params.SigningParameterFileFulcio, options *Options) (sigstore.Option, error) {
    if f.FulcioURL == "" {
        return nil, errors.New("missing fulcioURL")
    }
    fulcioURL, err := url.Parse(f.FulcioURL)
    if err != nil {
        return nil, fmt.Errorf("parsing fulcioURL %q: %w", f.FulcioURL, err)
    }

    if f.OIDCMode == params.OIDCModeStaticToken {
        if f.OIDCIDToken == "" {
            return nil, errors.New("missing oidcToken")
        }
        return fulcio.WithFulcioAndPreexistingOIDCIDToken(fulcioURL, f.OIDCIDToken), nil
    }

    if f.OIDCIssuerURL == "" {
        return nil, errors.New("missing oidcIssuerURL")
    }
    oidcIssuerURL, err := url.Parse(f.OIDCIssuerURL)
    if err != nil {
        return nil, fmt.Errorf("parsing oidcIssuerURL %q: %w", f.OIDCIssuerURL, err)
    }
    switch f.OIDCMode {
    case params.OIDCModeDeviceGrant:
        return fulcio.WithFulcioAndDeviceAuthorizationGrantOIDC(fulcioURL, oidcIssuerURL, f.OIDCClientID, f.OIDCClientSecret,
            options.Stdout), nil
    case params.OIDCModeInteractive:
        return fulcio.WithFulcioAndInteractiveOIDC(fulcioURL, oidcIssuerURL, f.OIDCClientID, f.OIDCClientSecret,
            options.Stdin, options.Stdout), nil
    case "":
        return nil, errors.New("missing oidcMode")
    case params.OIDCModeStaticToken:
        return nil, errors.New("internal inconsistency: SigningParameterFileOIDCModeStaticToken was supposed to already be handled")
    default:
        return nil, fmt.Errorf("unknown oidcMode value %q", f.OIDCMode)
    }
}
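
A minimal caller-side sketch of NewSignerFromParameterFile and Options from the file above; the parameter-file path and the passphrase prompt are hypothetical, and this is not the skopeo CLI wiring itself:

package main

import (
    "fmt"
    "os"

    "github.com/containers/image/v5/pkg/cli/sigstore"
)

func main() {
    // The parameter file path and the prompt implementation below are placeholders.
    s, err := sigstore.NewSignerFromParameterFile("sigstore-params.yaml", &sigstore.Options{
        PrivateKeyPassphrasePrompt: func(keyFile string) (string, error) {
            fmt.Fprintf(os.Stderr, "Enter passphrase for %s: ", keyFile)
            var passphrase string
            _, err := fmt.Fscanln(os.Stdin, &passphrase)
            return passphrase, err
        },
        Stdin:  os.Stdin,
        Stdout: os.Stdout,
    })
    if err != nil {
        panic(err)
    }
    // Per the documentation above, Close() must be called on the returned *signer.Signer.
    defer s.Close()
}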
155 vendor/github.com/containers/image/v5/signature/sigstore/fulcio/fulcio.go generated vendored Normal file
@ -0,0 +1,155 @@
package fulcio

import (
    "crypto"
    "crypto/ecdsa"
    "crypto/elliptic"
    "crypto/rand"
    "crypto/sha256"
    "crypto/x509"
    "fmt"
    "io"
    "net/url"

    "github.com/containers/image/v5/internal/useragent"
    "github.com/containers/image/v5/signature/sigstore/internal"
    "github.com/sigstore/fulcio/pkg/api"
    "github.com/sigstore/sigstore/pkg/oauth"
    "github.com/sigstore/sigstore/pkg/oauthflow"
    sigstoreSignature "github.com/sigstore/sigstore/pkg/signature"
    "github.com/sirupsen/logrus"
    "golang.org/x/oauth2"
)

// setupSignerWithFulcio updates s with a certificate generated by fulcioURL based on oidcIDToken
func setupSignerWithFulcio(s *internal.SigstoreSigner, fulcioURL *url.URL, oidcIDToken *oauthflow.OIDCIDToken) error {
    // ECDSA-P256 is the only interoperable algorithm per
    // https://github.com/sigstore/cosign/blob/main/specs/SIGNATURE_SPEC.md#signature-schemes .
    privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
    if err != nil {
        return fmt.Errorf("generating short-term private key: %w", err)
    }
    keyAlgorithm := "ecdsa"
    // SHA-256 is opencontainers/go-digest.Canonical, thus the algorithm to use here as well per
    // https://github.com/sigstore/cosign/blob/main/specs/SIGNATURE_SPEC.md#hashing-algorithms
    signer, err := sigstoreSignature.LoadECDSASigner(privateKey, crypto.SHA256)
    if err != nil {
        return fmt.Errorf("initializing short-term private key: %w", err)
    }
    s.PrivateKey = signer

    logrus.Debugf("Requesting a certificate from Fulcio at %s", fulcioURL.Redacted())
    fulcioClient := api.NewClient(fulcioURL, api.WithUserAgent(useragent.DefaultUserAgent))
    // Sign the email address as part of the request
    h := sha256.Sum256([]byte(oidcIDToken.Subject))
    keyOwnershipProof, err := ecdsa.SignASN1(rand.Reader, privateKey, h[:])
    if err != nil {
        return fmt.Errorf("Error signing key ownership proof: %w", err)
    }
    publicKeyBytes, err := x509.MarshalPKIXPublicKey(&privateKey.PublicKey)
    if err != nil {
        return fmt.Errorf("converting public key to ASN.1: %w", err)
    }
    // Note that unlike most OAuth2 uses, this passes the ID token, not an access token.
    // This is only secure if every Fulcio server has an individual client ID value
    // = fulcioOIDCClientID, distinct from other Fulcio servers,
    // that is embedded into the ID token’s "aud" field.
    resp, err := fulcioClient.SigningCert(api.CertificateRequest{
        PublicKey: api.Key{
            Content:   publicKeyBytes,
            Algorithm: keyAlgorithm,
        },
        SignedEmailAddress: keyOwnershipProof,
    }, oidcIDToken.RawString)
    if err != nil {
        return fmt.Errorf("obtaining certificate from Fulcio: %w", err)
    }
    s.FulcioGeneratedCertificate = resp.CertPEM
    s.FulcioGeneratedCertificateChain = resp.ChainPEM
    // Cosign goes through an unmarshal/marshal roundtrip for Fulcio-generated certificates, let’s not do that.
    s.SigningKeyOrCert = resp.CertPEM
    return nil
}

// WithFulcioAndPreexistingOIDCIDToken sets up signing to use a short-lived key and a Fulcio-issued certificate
// based on a caller-provided OIDC ID token.
func WithFulcioAndPreexistingOIDCIDToken(fulcioURL *url.URL, oidcIDToken string) internal.Option {
    return func(s *internal.SigstoreSigner) error {
        if s.PrivateKey != nil {
            return fmt.Errorf("multiple private key sources specified when preparing to create sigstore signatures")
        }

        // This adds dependencies even just to parse the token. We could possibly reimplement that, and split this variant
        // into a subpackage without the OIDC dependencies… but really, is this going to be used in significantly different situations
        // than the two interactive OIDC authentication workflows?
        //
        // Are there any widely used tools to manually obtain an ID token? Why would there be?
        // For long-term usage, users provisioning a static OIDC credential might just as well provision an already-generated certificate
        // or something like that.
        logrus.Debugf("Using a statically-provided OIDC token")
        staticTokenGetter := oauthflow.StaticTokenGetter{RawToken: oidcIDToken}
        oidcIDToken, err := staticTokenGetter.GetIDToken(nil, oauth2.Config{})
        if err != nil {
            return fmt.Errorf("parsing OIDC token: %w", err)
        }

        return setupSignerWithFulcio(s, fulcioURL, oidcIDToken)
    }
}

// WithFulcioAndDeviceAuthorizationGrantOIDC sets up signing to use a short-lived key and a Fulcio-issued certificate
// based on an OIDC ID token obtained using a device authorization grant (RFC 8628).
//
// interactiveOutput must be directly accessible to a human user in real time (i.e. not be just a log file).
func WithFulcioAndDeviceAuthorizationGrantOIDC(fulcioURL *url.URL, oidcIssuerURL *url.URL, oidcClientID, oidcClientSecret string,
    interactiveOutput io.Writer) internal.Option {
    return func(s *internal.SigstoreSigner) error {
        if s.PrivateKey != nil {
            return fmt.Errorf("multiple private key sources specified when preparing to create sigstore signatures")
        }

        logrus.Debugf("Starting OIDC device flow for issuer %s", oidcIssuerURL.Redacted())
        tokenGetter := oauthflow.NewDeviceFlowTokenGetterForIssuer(oidcIssuerURL.String())
        tokenGetter.MessagePrinter = func(s string) {
            fmt.Fprintln(interactiveOutput, s)
        }
        oidcIDToken, err := oauthflow.OIDConnect(oidcIssuerURL.String(), oidcClientID, oidcClientSecret, "", tokenGetter)
        if err != nil {
            return fmt.Errorf("Error authenticating with OIDC: %w", err)
        }

        return setupSignerWithFulcio(s, fulcioURL, oidcIDToken)
    }
}

// WithFulcioAndInteractiveOIDC sets up signing to use a short-lived key and a Fulcio-issued certificate
// based on an interactively-obtained OIDC ID token.
// The token is obtained
//   - directly using a browser, listening on localhost, automatically opening a browser to the OIDC issuer,
//     to be redirected on localhost. (I.e. the current environment must allow launching a browser that connects back to the current process;
//     either or both may be impossible in a container or a remote VM).
//   - or by instructing the user to manually open a browser, obtain the OIDC code, and interactively input it as text.
//
// interactiveInput and interactiveOutput must both be directly operable by a human user in real time (i.e. not be just a log file).
func WithFulcioAndInteractiveOIDC(fulcioURL *url.URL, oidcIssuerURL *url.URL, oidcClientID, oidcClientSecret string,
    interactiveInput io.Reader, interactiveOutput io.Writer) internal.Option {
    return func(s *internal.SigstoreSigner) error {
        if s.PrivateKey != nil {
            return fmt.Errorf("multiple private key sources specified when preparing to create sigstore signatures")
        }

        logrus.Debugf("Starting interactive OIDC authentication for issuer %s", oidcIssuerURL.Redacted())
        // This is intended to match oauthflow.DefaultIDTokenGetter, overriding only input/output
        tokenGetter := &oauthflow.InteractiveIDTokenGetter{
            HTMLPage: oauth.InteractiveSuccessHTML,
            Input:    interactiveInput,
            Output:   interactiveOutput,
        }
        oidcIDToken, err := oauthflow.OIDConnect(oidcIssuerURL.String(), oidcClientID, oidcClientSecret, "", tokenGetter)
        if err != nil {
            return fmt.Errorf("Error authenticating with OIDC: %w", err)
        }

        return setupSignerWithFulcio(s, fulcioURL, oidcIDToken)
    }
}
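
The Fulcio options above plug into sigstore.NewSigner, as the pkg/cli/sigstore code earlier does. A sketch using a pre-existing OIDC ID token; the Fulcio URL and the token are placeholders, and a real, valid ID token is required for this to actually succeed:

package main

import (
    "fmt"
    "net/url"

    "github.com/containers/image/v5/signature/sigstore"
    "github.com/containers/image/v5/signature/sigstore/fulcio"
)

func main() {
    // Hypothetical Fulcio server and an OIDC ID token obtained out of band.
    fulcioURL, err := url.Parse("https://fulcio.example.com")
    if err != nil {
        panic(err)
    }
    idToken := "<OIDC ID token obtained elsewhere>"

    s, err := sigstore.NewSigner(
        fulcio.WithFulcioAndPreexistingOIDCIDToken(fulcioURL, idToken),
    )
    if err != nil {
        fmt.Println("setting up Fulcio signing:", err)
        return
    }
    defer s.Close()
}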
52 vendor/github.com/containers/image/v5/signature/sigstore/rekor/leveled_logger.go generated vendored Normal file
@ -0,0 +1,52 @@
package rekor

import (
    "fmt"

    "github.com/hashicorp/go-retryablehttp"
    "github.com/sirupsen/logrus"
)

// leveledLogger adapts our use of logrus to the expected go-retryablehttp.LeveledLogger interface.
type leveledLogger struct {
    logger *logrus.Logger
}

func leveledLoggerForLogrus(logger *logrus.Logger) retryablehttp.LeveledLogger {
    return &leveledLogger{logger: logger}
}

// log is the actual conversion implementation
func (l *leveledLogger) log(level logrus.Level, msg string, keysAndValues []interface{}) {
    fields := logrus.Fields{}
    for i := 0; i < len(keysAndValues)-1; i += 2 {
        key := keysAndValues[i]
        keyString, isString := key.(string)
        if !isString {
            // It seems attractive to panic() here, but we might already be in a failure state, so let’s not make it worse
            keyString = fmt.Sprintf("[Invalid LeveledLogger key %#v]", key)
        }
        fields[keyString] = keysAndValues[i+1]
    }
    l.logger.WithFields(fields).Log(level, msg)
}

// Debug implements retryablehttp.LeveledLogger
func (l *leveledLogger) Debug(msg string, keysAndValues ...interface{}) {
    l.log(logrus.DebugLevel, msg, keysAndValues)
}

// Error implements retryablehttp.LeveledLogger
func (l *leveledLogger) Error(msg string, keysAndValues ...interface{}) {
    l.log(logrus.ErrorLevel, msg, keysAndValues)
}

// Info implements retryablehttp.LeveledLogger
func (l *leveledLogger) Info(msg string, keysAndValues ...interface{}) {
    l.log(logrus.InfoLevel, msg, keysAndValues)
}

// Warn implements retryablehttp.LeveledLogger
func (l *leveledLogger) Warn(msg string, keysAndValues ...interface{}) {
    l.log(logrus.WarnLevel, msg, keysAndValues)
}
160 vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor.go generated vendored Normal file
@ -0,0 +1,160 @@
package rekor

import (
	"context"
	"crypto/sha256"
	"encoding/base64"
	"encoding/hex"
	"encoding/json"
	"errors"
	"fmt"
	"net/url"
	"strings"

	"github.com/containers/image/v5/signature/internal"
	signerInternal "github.com/containers/image/v5/signature/sigstore/internal"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/swag"
	rekor "github.com/sigstore/rekor/pkg/client"
	"github.com/sigstore/rekor/pkg/generated/client"
	"github.com/sigstore/rekor/pkg/generated/client/entries"
	"github.com/sigstore/rekor/pkg/generated/models"
	"github.com/sirupsen/logrus"
)

// WithRekor asks the generated signature to be uploaded to the specified Rekor server,
// and to include a log inclusion proof in the signature.
func WithRekor(rekorURL *url.URL) signerInternal.Option {
	return func(s *signerInternal.SigstoreSigner) error {
		logrus.Debugf("Using Rekor server at %s", rekorURL.Redacted())
		client, err := rekor.GetRekorClient(rekorURL.String(),
			rekor.WithLogger(leveledLoggerForLogrus(logrus.StandardLogger())))
		if err != nil {
			return fmt.Errorf("creating Rekor client: %w", err)
		}
		u := uploader{
			client: client,
		}
		s.RekorUploader = u.uploadKeyOrCert
		return nil
	}
}

// uploader wraps a Rekor client, basically so that we can set RekorUploader to a method instead of a one-off closure.
type uploader struct {
	client *client.Rekor
}

// rekorEntryToSET converts a Rekor log entry into a sigstore “signed entry timestamp”.
func rekorEntryToSET(entry *models.LogEntryAnon) (internal.UntrustedRekorSET, error) {
	// We could plausibly call entry.Validate() here; that mostly just uses unnecessary reflection instead of direct == nil checks.
	// Right now the only extra validation .Validate() does is *entry.LogIndex >= 0 and a regex check on *entry.LogID;
	// we don’t particularly care about either of these (notably signature verification only uses the Body value).
	if entry.Verification == nil || entry.IntegratedTime == nil || entry.LogIndex == nil || entry.LogID == nil {
		return internal.UntrustedRekorSET{}, fmt.Errorf("invalid Rekor entry (missing data): %#v", *entry)
	}
	bodyBase64, ok := entry.Body.(string)
	if !ok {
		return internal.UntrustedRekorSET{}, fmt.Errorf("unexpected Rekor entry body type: %#v", entry.Body)
	}
	body, err := base64.StdEncoding.DecodeString(bodyBase64)
	if err != nil {
		return internal.UntrustedRekorSET{}, fmt.Errorf("error parsing Rekor entry body: %w", err)
	}
	payloadJSON, err := internal.UntrustedRekorPayload{
		Body:           body,
		IntegratedTime: *entry.IntegratedTime,
		LogIndex:       *entry.LogIndex,
		LogID:          *entry.LogID,
	}.MarshalJSON()
	if err != nil {
		return internal.UntrustedRekorSET{}, err
	}

	return internal.UntrustedRekorSET{
		UntrustedSignedEntryTimestamp: entry.Verification.SignedEntryTimestamp,
		UntrustedPayload:              payloadJSON,
	}, nil
}

// uploadEntry ensures proposedEntry exists in Rekor (usually uploading it), and returns the resulting log entry.
func (u *uploader) uploadEntry(ctx context.Context, proposedEntry models.ProposedEntry) (models.LogEntry, error) {
	params := entries.NewCreateLogEntryParamsWithContext(ctx)
	params.SetProposedEntry(proposedEntry)
	logrus.Debugf("Calling Rekor's CreateLogEntry")
	resp, err := u.client.Entries.CreateLogEntry(params)
	if err != nil {
		// In ordinary operation, we should not get duplicate entries, because our payload contains a timestamp,
		// so it is supposed to be unique; and the default key format, ECDSA p256, also contains a nonce.
		// But conflicts can fairly easily happen during debugging and experimentation, so it pays to handle this.
		var conflictErr *entries.CreateLogEntryConflict
		if errors.As(err, &conflictErr) && conflictErr.Location != "" {
			location := conflictErr.Location.String()
			logrus.Debugf("CreateLogEntry reported a conflict, location = %s", location)
			// We might be able to just GET the returned Location, but let’s use the generated API client.
			// OTOH that requires us to hard-code the URI structure…
			uuidDelimiter := strings.LastIndexByte(location, '/')
			if uuidDelimiter != -1 { // Otherwise the URI is unexpected, and fall through to the bottom
				uuid := location[uuidDelimiter+1:]
				logrus.Debugf("Calling Rekor's NewGetLogEntryByUUIDParamsWithContext")
				params2 := entries.NewGetLogEntryByUUIDParamsWithContext(ctx)
				params2.SetEntryUUID(uuid)
				resp2, err := u.client.Entries.GetLogEntryByUUID(params2)
				if err != nil {
					return nil, fmt.Errorf("Error re-loading previously-created log entry with UUID %s: %w", uuid, err)
				}
				return resp2.GetPayload(), nil
			}
		}
		return nil, fmt.Errorf("Error uploading a log entry: %w", err)
	}
	return resp.GetPayload(), nil
}

// uploadKeyOrCert integrates this code into sigstore/internal.Signer.
// Given components of the created signature, it returns a SET that should be added to the signature.
func (u *uploader) uploadKeyOrCert(ctx context.Context, keyOrCertBytes []byte, signatureBytes []byte, payloadBytes []byte) ([]byte, error) {
	payloadHash := sha256.Sum256(payloadBytes) // HashedRecord only accepts SHA-256
	proposedEntry := models.Hashedrekord{
		APIVersion: swag.String(internal.HashedRekordV001APIVersion),
		Spec: models.HashedrekordV001Schema{
			Data: &models.HashedrekordV001SchemaData{
				Hash: &models.HashedrekordV001SchemaDataHash{
					Algorithm: swag.String(models.HashedrekordV001SchemaDataHashAlgorithmSha256),
					Value:     swag.String(hex.EncodeToString(payloadHash[:])),
				},
			},
			Signature: &models.HashedrekordV001SchemaSignature{
				Content: strfmt.Base64(signatureBytes),
				PublicKey: &models.HashedrekordV001SchemaSignaturePublicKey{
					Content: strfmt.Base64(keyOrCertBytes),
				},
			},
		},
	}

	uploadedPayload, err := u.uploadEntry(ctx, &proposedEntry)
	if err != nil {
		return nil, err
	}

	if len(uploadedPayload) != 1 {
		return nil, fmt.Errorf("expected 1 Rekor entry, got %d", len(uploadedPayload))
	}
	var storedEntry *models.LogEntryAnon
	// This “loop” extracts the single value from the uploadedPayload map.
	for _, p := range uploadedPayload {
		storedEntry = &p
		break
	}

	rekorBundle, err := rekorEntryToSET(storedEntry)
	if err != nil {
		return nil, err
	}
	rekorSETBytes, err := json.Marshal(rekorBundle)
	if err != nil {
		return nil, err
	}
	return rekorSETBytes, nil
}
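A minimal usage sketch (not part of this commit) of the WithRekor option defined above. The Rekor URL is a placeholder, and handing the returned option to a sigstore signer is assumed to happen through the exported signer constructors used elsewhere in this commit; only the option construction itself is shown.

package main

import (
	"log"
	"net/url"

	"github.com/containers/image/v5/signature/sigstore/rekor"
)

func main() {
	u, err := url.Parse("https://rekor.sigstore.dev") // placeholder Rekor server URL
	if err != nil {
		log.Fatal(err)
	}
	// Returns an option that configures the signer's RekorUploader as shown above.
	opt := rekor.WithRekor(u)
	_ = opt // pass to the sigstore signer constructor (assumed, not shown here)
}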
202 vendor/github.com/coreos/go-oidc/v3/LICENSE generated vendored Normal file
@ -0,0 +1,202 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "{}"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright {yyyy} {name of copyright owner}

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
5 vendor/github.com/coreos/go-oidc/v3/NOTICE generated vendored Normal file
@ -0,0 +1,5 @@
CoreOS Project
Copyright 2014 CoreOS, Inc

This product includes software developed at CoreOS, Inc.
(http://www.coreos.com/).
16 vendor/github.com/coreos/go-oidc/v3/oidc/jose.go generated vendored Normal file
@ -0,0 +1,16 @@
package oidc

// JOSE asymmetric signing algorithm values as defined by RFC 7518
//
// see: https://tools.ietf.org/html/rfc7518#section-3.1
const (
	RS256 = "RS256" // RSASSA-PKCS-v1.5 using SHA-256
	RS384 = "RS384" // RSASSA-PKCS-v1.5 using SHA-384
	RS512 = "RS512" // RSASSA-PKCS-v1.5 using SHA-512
	ES256 = "ES256" // ECDSA using P-256 and SHA-256
	ES384 = "ES384" // ECDSA using P-384 and SHA-384
	ES512 = "ES512" // ECDSA using P-521 and SHA-512
	PS256 = "PS256" // RSASSA-PSS using SHA256 and MGF1-SHA256
	PS384 = "PS384" // RSASSA-PSS using SHA384 and MGF1-SHA384
	PS512 = "PS512" // RSASSA-PSS using SHA512 and MGF1-SHA512
)
248 vendor/github.com/coreos/go-oidc/v3/oidc/jwks.go generated vendored Normal file
@ -0,0 +1,248 @@
package oidc

import (
	"context"
	"crypto"
	"crypto/ecdsa"
	"crypto/rsa"
	"errors"
	"fmt"
	"io/ioutil"
	"net/http"
	"sync"
	"time"

	jose "gopkg.in/square/go-jose.v2"
)

// StaticKeySet is a verifier that validates JWT against a static set of public keys.
type StaticKeySet struct {
	// PublicKeys used to verify the JWT. Supported types are *rsa.PublicKey and
	// *ecdsa.PublicKey.
	PublicKeys []crypto.PublicKey
}

// VerifySignature compares the signature against a static set of public keys.
func (s *StaticKeySet) VerifySignature(ctx context.Context, jwt string) ([]byte, error) {
	jws, err := jose.ParseSigned(jwt)
	if err != nil {
		return nil, fmt.Errorf("parsing jwt: %v", err)
	}
	for _, pub := range s.PublicKeys {
		switch pub.(type) {
		case *rsa.PublicKey:
		case *ecdsa.PublicKey:
		default:
			return nil, fmt.Errorf("invalid public key type provided: %T", pub)
		}
		payload, err := jws.Verify(pub)
		if err != nil {
			continue
		}
		return payload, nil
	}
	return nil, fmt.Errorf("no public keys able to verify jwt")
}

// NewRemoteKeySet returns a KeySet that can validate JSON web tokens by using HTTP
// GETs to fetch JSON web token sets hosted at a remote URL. This is automatically
// used by NewProvider using the URLs returned by OpenID Connect discovery, but is
// exposed for providers that don't support discovery or to prevent round trips to the
// discovery URL.
//
// The returned KeySet is a long lived verifier that caches keys based on any
// keys change. Reuse a common remote key set instead of creating new ones as needed.
func NewRemoteKeySet(ctx context.Context, jwksURL string) *RemoteKeySet {
	return newRemoteKeySet(ctx, jwksURL, time.Now)
}

func newRemoteKeySet(ctx context.Context, jwksURL string, now func() time.Time) *RemoteKeySet {
	if now == nil {
		now = time.Now
	}
	return &RemoteKeySet{jwksURL: jwksURL, ctx: cloneContext(ctx), now: now}
}

// RemoteKeySet is a KeySet implementation that validates JSON web tokens against
// a jwks_uri endpoint.
type RemoteKeySet struct {
	jwksURL string
	ctx     context.Context
	now     func() time.Time

	// guard all other fields
	mu sync.RWMutex

	// inflight suppresses parallel execution of updateKeys and allows
	// multiple goroutines to wait for its result.
	inflight *inflight

	// A set of cached keys.
	cachedKeys []jose.JSONWebKey
}

// inflight is used to wait on some in-flight request from multiple goroutines.
type inflight struct {
	doneCh chan struct{}

	keys []jose.JSONWebKey
	err  error
}

func newInflight() *inflight {
	return &inflight{doneCh: make(chan struct{})}
}

// wait returns a channel that multiple goroutines can receive on. Once it returns
// a value, the inflight request is done and result() can be inspected.
func (i *inflight) wait() <-chan struct{} {
	return i.doneCh
}

// done can only be called by a single goroutine. It records the result of the
// inflight request and signals other goroutines that the result is safe to
// inspect.
func (i *inflight) done(keys []jose.JSONWebKey, err error) {
	i.keys = keys
	i.err = err
	close(i.doneCh)
}

// result cannot be called until the wait() channel has returned a value.
func (i *inflight) result() ([]jose.JSONWebKey, error) {
	return i.keys, i.err
}

// parsedJWTKey is a context key that allows common setups to avoid parsing the
// JWT twice. It holds a *jose.JSONWebSignature value.
var parsedJWTKey contextKey

// VerifySignature validates a payload against a signature from the jwks_uri.
//
// Users MUST NOT call this method directly and should use an IDTokenVerifier
// instead. This method skips critical validations such as 'alg' values and is
// only exported to implement the KeySet interface.
func (r *RemoteKeySet) VerifySignature(ctx context.Context, jwt string) ([]byte, error) {
	jws, ok := ctx.Value(parsedJWTKey).(*jose.JSONWebSignature)
	if !ok {
		var err error
		jws, err = jose.ParseSigned(jwt)
		if err != nil {
			return nil, fmt.Errorf("oidc: malformed jwt: %v", err)
		}
	}
	return r.verify(ctx, jws)
}

func (r *RemoteKeySet) verify(ctx context.Context, jws *jose.JSONWebSignature) ([]byte, error) {
	// We don't support JWTs signed with multiple signatures.
	keyID := ""
	for _, sig := range jws.Signatures {
		keyID = sig.Header.KeyID
		break
	}

	keys := r.keysFromCache()
	for _, key := range keys {
		if keyID == "" || key.KeyID == keyID {
			if payload, err := jws.Verify(&key); err == nil {
				return payload, nil
			}
		}
	}

	// If the kid doesn't match, check for new keys from the remote. This is the
	// strategy recommended by the spec.
	//
	// https://openid.net/specs/openid-connect-core-1_0.html#RotateSigKeys
	keys, err := r.keysFromRemote(ctx)
	if err != nil {
		return nil, fmt.Errorf("fetching keys %v", err)
	}

	for _, key := range keys {
		if keyID == "" || key.KeyID == keyID {
			if payload, err := jws.Verify(&key); err == nil {
				return payload, nil
			}
		}
	}
	return nil, errors.New("failed to verify id token signature")
}

func (r *RemoteKeySet) keysFromCache() (keys []jose.JSONWebKey) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	return r.cachedKeys
}

// keysFromRemote syncs the key set from the remote set, records the values in the
// cache, and returns the key set.
func (r *RemoteKeySet) keysFromRemote(ctx context.Context) ([]jose.JSONWebKey, error) {
	// Need to lock to inspect the inflight request field.
	r.mu.Lock()
	// If there's not a current inflight request, create one.
	if r.inflight == nil {
		r.inflight = newInflight()

		// This goroutine has exclusive ownership over the current inflight
		// request. It releases the resource by nil'ing the inflight field
		// once the goroutine is done.
		go func() {
			// Sync keys and finish inflight when that's done.
			keys, err := r.updateKeys()

			r.inflight.done(keys, err)

			// Lock to update the keys and indicate that there is no longer an
			// inflight request.
			r.mu.Lock()
			defer r.mu.Unlock()

			if err == nil {
				r.cachedKeys = keys
			}

			// Free inflight so a different request can run.
			r.inflight = nil
		}()
	}
	inflight := r.inflight
	r.mu.Unlock()

	select {
	case <-ctx.Done():
		return nil, ctx.Err()
	case <-inflight.wait():
		return inflight.result()
	}
}

func (r *RemoteKeySet) updateKeys() ([]jose.JSONWebKey, error) {
	req, err := http.NewRequest("GET", r.jwksURL, nil)
	if err != nil {
		return nil, fmt.Errorf("oidc: can't create request: %v", err)
	}

	resp, err := doRequest(r.ctx, req)
	if err != nil {
		return nil, fmt.Errorf("oidc: get keys failed %v", err)
	}
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("unable to read response body: %v", err)
	}

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("oidc: get keys failed: %s %s", resp.Status, body)
	}

	var keySet jose.JSONWebKeySet
	err = unmarshalResp(resp, body, &keySet)
	if err != nil {
		return nil, fmt.Errorf("oidc: failed to decode keys: %v %s", err, body)
	}
	return keySet.Keys, nil
}
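A minimal test-oriented sketch (not part of this commit) that combines the StaticKeySet above with NewVerifier and Config from verify.go later in this diff. The issuer, client ID, and key are placeholders.

package authexample

import (
	"context"
	"crypto"
	"crypto/rsa"
	"fmt"

	"github.com/coreos/go-oidc/v3/oidc"
)

// verifyWithStaticKey checks rawIDToken against a single fixed RSA public key,
// as one might do in a unit test where no jwks_uri endpoint is available.
func verifyWithStaticKey(ctx context.Context, rawIDToken string, pub *rsa.PublicKey) error {
	keySet := &oidc.StaticKeySet{PublicKeys: []crypto.PublicKey{pub}}
	verifier := oidc.NewVerifier("https://issuer.example.com", keySet, &oidc.Config{
		ClientID: "example-client-id", // placeholder audience
	})
	idToken, err := verifier.Verify(ctx, rawIDToken)
	if err != nil {
		return err
	}
	fmt.Println("verified subject:", idToken.Subject)
	return nil
}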
522 vendor/github.com/coreos/go-oidc/v3/oidc/oidc.go generated vendored Normal file
@ -0,0 +1,522 @@
// Package oidc implements OpenID Connect client logic for the golang.org/x/oauth2 package.
package oidc

import (
	"context"
	"crypto/sha256"
	"crypto/sha512"
	"encoding/base64"
	"encoding/json"
	"errors"
	"fmt"
	"hash"
	"io/ioutil"
	"mime"
	"net/http"
	"strings"
	"time"

	"golang.org/x/oauth2"
)

const (
	// ScopeOpenID is the mandatory scope for all OpenID Connect OAuth2 requests.
	ScopeOpenID = "openid"

	// ScopeOfflineAccess is an optional scope defined by OpenID Connect for requesting
	// OAuth2 refresh tokens.
	//
	// Support for this scope differs between OpenID Connect providers. For instance
	// Google rejects it, favoring appending "access_type=offline" as part of the
	// authorization request instead.
	//
	// See: https://openid.net/specs/openid-connect-core-1_0.html#OfflineAccess
	ScopeOfflineAccess = "offline_access"
)

var (
	errNoAtHash      = errors.New("id token did not have an access token hash")
	errInvalidAtHash = errors.New("access token hash does not match value in ID token")
)

type contextKey int

var issuerURLKey contextKey

// ClientContext returns a new Context that carries the provided HTTP client.
//
// This method sets the same context key used by the golang.org/x/oauth2 package,
// so the returned context works for that package too.
//
//	myClient := &http.Client{}
//	ctx := oidc.ClientContext(parentContext, myClient)
//
//	// This will use the custom client
//	provider, err := oidc.NewProvider(ctx, "https://accounts.example.com")
func ClientContext(ctx context.Context, client *http.Client) context.Context {
	return context.WithValue(ctx, oauth2.HTTPClient, client)
}

// cloneContext copies a context's bag-of-values into a new context that isn't
// associated with its cancellation. This is used to initialize remote keys sets
// which run in the background and aren't associated with the initial context.
func cloneContext(ctx context.Context) context.Context {
	cp := context.Background()
	if c, ok := ctx.Value(oauth2.HTTPClient).(*http.Client); ok {
		cp = ClientContext(cp, c)
	}
	return cp
}

// InsecureIssuerURLContext allows discovery to work when the issuer_url reported
// by upstream is mismatched with the discovery URL. This is meant for integration
// with off-spec providers such as Azure.
//
//	discoveryBaseURL := "https://login.microsoftonline.com/organizations/v2.0"
//	issuerURL := "https://login.microsoftonline.com/my-tenantid/v2.0"
//
//	ctx := oidc.InsecureIssuerURLContext(parentContext, issuerURL)
//
//	// Provider will be discovered with the discoveryBaseURL, but use issuerURL
//	// for future issuer validation.
//	provider, err := oidc.NewProvider(ctx, discoveryBaseURL)
//
// This is insecure because validating the correct issuer is critical for multi-tenant
// providers. Any overrides here MUST be carefully reviewed.
func InsecureIssuerURLContext(ctx context.Context, issuerURL string) context.Context {
	return context.WithValue(ctx, issuerURLKey, issuerURL)
}

func doRequest(ctx context.Context, req *http.Request) (*http.Response, error) {
	client := http.DefaultClient
	if c, ok := ctx.Value(oauth2.HTTPClient).(*http.Client); ok {
		client = c
	}
	return client.Do(req.WithContext(ctx))
}

// Provider represents an OpenID Connect server's configuration.
type Provider struct {
	issuer      string
	authURL     string
	tokenURL    string
	userInfoURL string
	algorithms  []string

	// Raw claims returned by the server.
	rawClaims []byte

	remoteKeySet KeySet
}

type providerJSON struct {
	Issuer      string   `json:"issuer"`
	AuthURL     string   `json:"authorization_endpoint"`
	TokenURL    string   `json:"token_endpoint"`
	JWKSURL     string   `json:"jwks_uri"`
	UserInfoURL string   `json:"userinfo_endpoint"`
	Algorithms  []string `json:"id_token_signing_alg_values_supported"`
}

// supportedAlgorithms is a list of algorithms explicitly supported by this
// package. If a provider supports other algorithms, such as HS256 or none,
// those values won't be passed to the IDTokenVerifier.
var supportedAlgorithms = map[string]bool{
	RS256: true,
	RS384: true,
	RS512: true,
	ES256: true,
	ES384: true,
	ES512: true,
	PS256: true,
	PS384: true,
	PS512: true,
}

// ProviderConfig allows creating providers when discovery isn't supported. It's
// generally easier to use NewProvider directly.
type ProviderConfig struct {
	// IssuerURL is the identity of the provider, and the string it uses to sign
	// ID tokens with. For example "https://accounts.google.com". This value MUST
	// match ID tokens exactly.
	IssuerURL string
	// AuthURL is the endpoint used by the provider to support the OAuth 2.0
	// authorization endpoint.
	AuthURL string
	// TokenURL is the endpoint used by the provider to support the OAuth 2.0
	// token endpoint.
	TokenURL string
	// UserInfoURL is the endpoint used by the provider to support the OpenID
	// Connect UserInfo flow.
	//
	// https://openid.net/specs/openid-connect-core-1_0.html#UserInfo
	UserInfoURL string
	// JWKSURL is the endpoint used by the provider to advertise public keys to
	// verify issued ID tokens. This endpoint is polled as new keys are made
	// available.
	JWKSURL string

	// Algorithms, if provided, indicate a list of JWT algorithms allowed to sign
	// ID tokens. If not provided, this defaults to the algorithms advertised by
	// the JWK endpoint, then the set of algorithms supported by this package.
	Algorithms []string
}

// NewProvider initializes a provider from a set of endpoints, rather than
// through discovery.
func (p *ProviderConfig) NewProvider(ctx context.Context) *Provider {
	return &Provider{
		issuer:       p.IssuerURL,
		authURL:      p.AuthURL,
		tokenURL:     p.TokenURL,
		userInfoURL:  p.UserInfoURL,
		algorithms:   p.Algorithms,
		remoteKeySet: NewRemoteKeySet(cloneContext(ctx), p.JWKSURL),
	}
}

// NewProvider uses the OpenID Connect discovery mechanism to construct a Provider.
//
// The issuer is the URL identifier for the service. For example: "https://accounts.google.com"
// or "https://login.salesforce.com".
func NewProvider(ctx context.Context, issuer string) (*Provider, error) {
	wellKnown := strings.TrimSuffix(issuer, "/") + "/.well-known/openid-configuration"
	req, err := http.NewRequest("GET", wellKnown, nil)
	if err != nil {
		return nil, err
	}
	resp, err := doRequest(ctx, req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("unable to read response body: %v", err)
	}

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("%s: %s", resp.Status, body)
	}

	var p providerJSON
	err = unmarshalResp(resp, body, &p)
	if err != nil {
		return nil, fmt.Errorf("oidc: failed to decode provider discovery object: %v", err)
	}

	issuerURL, skipIssuerValidation := ctx.Value(issuerURLKey).(string)
	if !skipIssuerValidation {
		issuerURL = issuer
	}
	if p.Issuer != issuerURL && !skipIssuerValidation {
		return nil, fmt.Errorf("oidc: issuer did not match the issuer returned by provider, expected %q got %q", issuer, p.Issuer)
	}
	var algs []string
	for _, a := range p.Algorithms {
		if supportedAlgorithms[a] {
			algs = append(algs, a)
		}
	}
	return &Provider{
		issuer:       issuerURL,
		authURL:      p.AuthURL,
		tokenURL:     p.TokenURL,
		userInfoURL:  p.UserInfoURL,
		algorithms:   algs,
		rawClaims:    body,
		remoteKeySet: NewRemoteKeySet(cloneContext(ctx), p.JWKSURL),
	}, nil
}

// Claims unmarshals raw fields returned by the server during discovery.
//
//	var claims struct {
//		ScopesSupported []string `json:"scopes_supported"`
//		ClaimsSupported []string `json:"claims_supported"`
//	}
//
//	if err := provider.Claims(&claims); err != nil {
//		// handle unmarshaling error
//	}
//
// For a list of fields defined by the OpenID Connect spec see:
// https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata
func (p *Provider) Claims(v interface{}) error {
	if p.rawClaims == nil {
		return errors.New("oidc: claims not set")
	}
	return json.Unmarshal(p.rawClaims, v)
}

// Endpoint returns the OAuth2 auth and token endpoints for the given provider.
func (p *Provider) Endpoint() oauth2.Endpoint {
	return oauth2.Endpoint{AuthURL: p.authURL, TokenURL: p.tokenURL}
}

// UserInfo represents the OpenID Connect userinfo claims.
type UserInfo struct {
	Subject       string `json:"sub"`
	Profile       string `json:"profile"`
	Email         string `json:"email"`
	EmailVerified bool   `json:"email_verified"`

	claims []byte
}

type userInfoRaw struct {
	Subject string `json:"sub"`
	Profile string `json:"profile"`
	Email   string `json:"email"`
	// Handle providers that return email_verified as a string
	// https://forums.aws.amazon.com/thread.jspa?messageID=949441#949441 and
	// https://discuss.elastic.co/t/openid-error-after-authenticating-against-aws-cognito/206018/11
	EmailVerified stringAsBool `json:"email_verified"`
}

// Claims unmarshals the raw JSON object claims into the provided object.
func (u *UserInfo) Claims(v interface{}) error {
	if u.claims == nil {
		return errors.New("oidc: claims not set")
	}
	return json.Unmarshal(u.claims, v)
}

// UserInfo uses the token source to query the provider's user info endpoint.
func (p *Provider) UserInfo(ctx context.Context, tokenSource oauth2.TokenSource) (*UserInfo, error) {
	if p.userInfoURL == "" {
		return nil, errors.New("oidc: user info endpoint is not supported by this provider")
	}

	req, err := http.NewRequest("GET", p.userInfoURL, nil)
	if err != nil {
		return nil, fmt.Errorf("oidc: create GET request: %v", err)
	}

	token, err := tokenSource.Token()
	if err != nil {
		return nil, fmt.Errorf("oidc: get access token: %v", err)
	}
	token.SetAuthHeader(req)

	resp, err := doRequest(ctx, req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("%s: %s", resp.Status, body)
	}

	ct := resp.Header.Get("Content-Type")
	mediaType, _, parseErr := mime.ParseMediaType(ct)
	if parseErr == nil && mediaType == "application/jwt" {
		payload, err := p.remoteKeySet.VerifySignature(ctx, string(body))
		if err != nil {
			return nil, fmt.Errorf("oidc: invalid userinfo jwt signature %v", err)
		}
		body = payload
	}

	var userInfo userInfoRaw
	if err := json.Unmarshal(body, &userInfo); err != nil {
		return nil, fmt.Errorf("oidc: failed to decode userinfo: %v", err)
	}
	return &UserInfo{
		Subject:       userInfo.Subject,
		Profile:       userInfo.Profile,
		Email:         userInfo.Email,
		EmailVerified: bool(userInfo.EmailVerified),
		claims:        body,
	}, nil
}

// IDToken is an OpenID Connect extension that provides a predictable representation
// of an authorization event.
//
// The ID Token only holds fields OpenID Connect requires. To access additional
// claims returned by the server, use the Claims method.
type IDToken struct {
	// The URL of the server which issued this token. OpenID Connect
	// requires this value always be identical to the URL used for
	// initial discovery.
	//
	// Note: Because of a known issue with Google Accounts' implementation
	// this value may differ when using Google.
	//
	// See: https://developers.google.com/identity/protocols/OpenIDConnect#obtainuserinfo
	Issuer string

	// The client ID, or set of client IDs, that this token is issued for. For
	// common uses, this is the client that initialized the auth flow.
	//
	// This package ensures the audience contains an expected value.
	Audience []string

	// A unique string which identifies the end user.
	Subject string

	// Expiry of the token. This package will not process tokens that have
	// expired unless that validation is explicitly turned off.
	Expiry time.Time
	// When the token was issued by the provider.
	IssuedAt time.Time

	// Initial nonce provided during the authentication redirect.
	//
	// This package does NOT provide verification on the value of this field
	// and it's the user's responsibility to ensure it contains a valid value.
	Nonce string

	// at_hash claim, if set in the ID token. Callers can verify an access token
	// that corresponds to the ID token using the VerifyAccessToken method.
	AccessTokenHash string

	// signature algorithm used for ID token, needed to compute a verification hash of an
	// access token
	sigAlgorithm string

	// Raw payload of the id_token.
	claims []byte

	// Map of distributed claim names to claim sources
	distributedClaims map[string]claimSource
}

// Claims unmarshals the raw JSON payload of the ID Token into a provided struct.
//
//	idToken, err := idTokenVerifier.Verify(rawIDToken)
//	if err != nil {
//		// handle error
//	}
//	var claims struct {
//		Email         string `json:"email"`
//		EmailVerified bool   `json:"email_verified"`
//	}
//	if err := idToken.Claims(&claims); err != nil {
//		// handle error
//	}
func (i *IDToken) Claims(v interface{}) error {
	if i.claims == nil {
		return errors.New("oidc: claims not set")
	}
	return json.Unmarshal(i.claims, v)
}

// VerifyAccessToken verifies that the hash of the access token that corresponds to the ID token
// matches the hash in the ID token. It returns an error if the hashes don't match.
// It is the caller's responsibility to ensure that the optional access token hash is present for the ID token
// before calling this method. See https://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken
func (i *IDToken) VerifyAccessToken(accessToken string) error {
	if i.AccessTokenHash == "" {
		return errNoAtHash
	}
	var h hash.Hash
	switch i.sigAlgorithm {
	case RS256, ES256, PS256:
		h = sha256.New()
	case RS384, ES384, PS384:
		h = sha512.New384()
	case RS512, ES512, PS512:
		h = sha512.New()
	default:
		return fmt.Errorf("oidc: unsupported signing algorithm %q", i.sigAlgorithm)
	}
	h.Write([]byte(accessToken)) // hash documents that Write will never return an error
	sum := h.Sum(nil)[:h.Size()/2]
	actual := base64.RawURLEncoding.EncodeToString(sum)
	if actual != i.AccessTokenHash {
		return errInvalidAtHash
	}
	return nil
}

type idToken struct {
	Issuer       string                 `json:"iss"`
	Subject      string                 `json:"sub"`
	Audience     audience               `json:"aud"`
	Expiry       jsonTime               `json:"exp"`
	IssuedAt     jsonTime               `json:"iat"`
	NotBefore    *jsonTime              `json:"nbf"`
	Nonce        string                 `json:"nonce"`
	AtHash       string                 `json:"at_hash"`
	ClaimNames   map[string]string      `json:"_claim_names"`
	ClaimSources map[string]claimSource `json:"_claim_sources"`
}

type claimSource struct {
	Endpoint    string `json:"endpoint"`
	AccessToken string `json:"access_token"`
}

type stringAsBool bool

func (sb *stringAsBool) UnmarshalJSON(b []byte) error {
	switch string(b) {
	case "true", `"true"`:
		*sb = true
	case "false", `"false"`:
		*sb = false
	default:
		return errors.New("invalid value for boolean")
	}
	return nil
}

type audience []string

func (a *audience) UnmarshalJSON(b []byte) error {
	var s string
	if json.Unmarshal(b, &s) == nil {
		*a = audience{s}
		return nil
	}
	var auds []string
	if err := json.Unmarshal(b, &auds); err != nil {
		return err
	}
	*a = auds
	return nil
}

type jsonTime time.Time

func (j *jsonTime) UnmarshalJSON(b []byte) error {
	var n json.Number
	if err := json.Unmarshal(b, &n); err != nil {
		return err
	}
	var unix int64

	if t, err := n.Int64(); err == nil {
		unix = t
	} else {
		f, err := n.Float64()
		if err != nil {
			return err
		}
		unix = int64(f)
	}
	*j = jsonTime(time.Unix(unix, 0))
	return nil
}

func unmarshalResp(r *http.Response, body []byte, v interface{}) error {
	err := json.Unmarshal(body, &v)
	if err == nil {
		return nil
	}
	ct := r.Header.Get("Content-Type")
	mediaType, _, parseErr := mime.ParseMediaType(ct)
	if parseErr == nil && mediaType == "application/json" {
		return fmt.Errorf("got Content-Type = application/json, but could not unmarshal as JSON: %v", err)
	}
	return fmt.Errorf("expected Content-Type = application/json, got %q: %v", ct, err)
}
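A short sketch (not part of this commit) of the discovery flow exposed above: NewProvider, extra discovery claims, and Endpoint wired into an oauth2.Config. The issuer, client ID, and redirect URL are placeholders.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/coreos/go-oidc/v3/oidc"
	"golang.org/x/oauth2"
)

func main() {
	ctx := context.Background()
	provider, err := oidc.NewProvider(ctx, "https://accounts.example.com") // placeholder issuer
	if err != nil {
		log.Fatal(err)
	}

	// Extra discovery metadata beyond the fields Provider exposes directly.
	var claims struct {
		ScopesSupported []string `json:"scopes_supported"`
	}
	if err := provider.Claims(&claims); err != nil {
		log.Fatal(err)
	}
	fmt.Println("scopes supported:", claims.ScopesSupported)

	conf := oauth2.Config{
		ClientID:    "example-client-id", // placeholder
		RedirectURL: "https://app.example.com/callback",
		Endpoint:    provider.Endpoint(),
		Scopes:      []string{oidc.ScopeOpenID, "email"},
	}
	fmt.Println("auth URL:", conf.AuthCodeURL("state"))
}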
344
vendor/github.com/coreos/go-oidc/v3/oidc/verify.go
generated
vendored
Normal file
344
vendor/github.com/coreos/go-oidc/v3/oidc/verify.go
generated
vendored
Normal file
@ -0,0 +1,344 @@
|
||||
package oidc
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"golang.org/x/oauth2"
|
||||
jose "gopkg.in/square/go-jose.v2"
|
||||
)
|
||||
|
||||
const (
|
||||
issuerGoogleAccounts = "https://accounts.google.com"
|
||||
issuerGoogleAccountsNoScheme = "accounts.google.com"
|
||||
)
|
||||
|
||||
// TokenExpiredError indicates that Verify failed because the token was expired. This
|
||||
// error does NOT indicate that the token is not also invalid for other reasons. Other
|
||||
// checks might have failed if the expiration check had not failed.
|
||||
type TokenExpiredError struct {
|
||||
// Expiry is the time when the token expired.
|
||||
Expiry time.Time
|
||||
}
|
||||
|
||||
func (e *TokenExpiredError) Error() string {
|
||||
return fmt.Sprintf("oidc: token is expired (Token Expiry: %v)", e.Expiry)
|
||||
}
|
||||
|
||||
// KeySet is a set of publc JSON Web Keys that can be used to validate the signature
|
||||
// of JSON web tokens. This is expected to be backed by a remote key set through
|
||||
// provider metadata discovery or an in-memory set of keys delivered out-of-band.
|
||||
type KeySet interface {
|
||||
// VerifySignature parses the JSON web token, verifies the signature, and returns
|
||||
// the raw payload. Header and claim fields are validated by other parts of the
|
||||
// package. For example, the KeySet does not need to check values such as signature
|
||||
// algorithm, issuer, and audience since the IDTokenVerifier validates these values
|
||||
// independently.
|
||||
//
|
||||
// If VerifySignature makes HTTP requests to verify the token, it's expected to
|
||||
// use any HTTP client associated with the context through ClientContext.
|
||||
VerifySignature(ctx context.Context, jwt string) (payload []byte, err error)
|
||||
}
|
||||
|
||||
// IDTokenVerifier provides verification for ID Tokens.
|
||||
type IDTokenVerifier struct {
|
||||
keySet KeySet
|
||||
config *Config
|
||||
issuer string
|
||||
}
|
||||
|
||||
// NewVerifier returns a verifier manually constructed from a key set and issuer URL.
|
||||
//
|
||||
// It's easier to use provider discovery to construct an IDTokenVerifier than creating
|
||||
// one directly. This method is intended to be used with provider that don't support
|
||||
// metadata discovery, or avoiding round trips when the key set URL is already known.
|
||||
//
|
||||
// This constructor can be used to create a verifier directly using the issuer URL and
|
||||
// JSON Web Key Set URL without using discovery:
|
||||
//
|
||||
// keySet := oidc.NewRemoteKeySet(ctx, "https://www.googleapis.com/oauth2/v3/certs")
|
||||
// verifier := oidc.NewVerifier("https://accounts.google.com", keySet, config)
|
||||
//
|
||||
// Or a static key set (e.g. for testing):
|
||||
//
|
||||
// keySet := &oidc.StaticKeySet{PublicKeys: []crypto.PublicKey{pub1, pub2}}
|
||||
// verifier := oidc.NewVerifier("https://accounts.google.com", keySet, config)
|
||||
//
|
||||
func NewVerifier(issuerURL string, keySet KeySet, config *Config) *IDTokenVerifier {
|
||||
return &IDTokenVerifier{keySet: keySet, config: config, issuer: issuerURL}
|
||||
}
|
||||
|
||||
// Config is the configuration for an IDTokenVerifier.
|
||||
type Config struct {
|
||||
// Expected audience of the token. For a majority of the cases this is expected to be
|
||||
// the ID of the client that initialized the login flow. It may occasionally differ if
|
||||
// the provider supports the authorizing party (azp) claim.
|
||||
//
|
||||
// If not provided, users must explicitly set SkipClientIDCheck.
|
||||
ClientID string
|
||||
// If specified, only this set of algorithms may be used to sign the JWT.
|
||||
//
|
||||
// If the IDTokenVerifier is created from a provider with (*Provider).Verifier, this
|
||||
// defaults to the set of algorithms the provider supports. Otherwise this values
|
||||
// defaults to RS256.
|
||||
SupportedSigningAlgs []string
|
||||
|
||||
// If true, no ClientID check performed. Must be true if ClientID field is empty.
|
||||
SkipClientIDCheck bool
|
||||
// If true, token expiry is not checked.
|
||||
SkipExpiryCheck bool
|
||||
|
||||
// SkipIssuerCheck is intended for specialized cases where the the caller wishes to
|
||||
// defer issuer validation. When enabled, callers MUST independently verify the Token's
|
||||
// Issuer is a known good value.
|
||||
//
|
||||
// Mismatched issuers often indicate client mis-configuration. If mismatches are
|
||||
// unexpected, evaluate if the provided issuer URL is incorrect instead of enabling
|
||||
// this option.
|
||||
SkipIssuerCheck bool
|
||||
|
||||
// Time function to check Token expiry. Defaults to time.Now
|
||||
Now func() time.Time
|
||||
|
||||
// InsecureSkipSignatureCheck causes this package to skip JWT signature validation.
|
||||
// It's intended for special cases where providers (such as Azure), use the "none"
|
||||
// algorithm.
|
||||
//
|
||||
// This option can only be enabled safely when the ID Token is received directly
|
||||
// from the provider after the token exchange.
|
||||
//
|
||||
// This option MUST NOT be used when receiving an ID Token from sources other
|
||||
// than the token endpoint.
|
||||
InsecureSkipSignatureCheck bool
|
||||
}
|
||||
|
||||
// Verifier returns an IDTokenVerifier that uses the provider's key set to verify JWTs.
|
||||
func (p *Provider) Verifier(config *Config) *IDTokenVerifier {
|
||||
if len(config.SupportedSigningAlgs) == 0 && len(p.algorithms) > 0 {
|
||||
// Make a copy so we don't modify the config values.
|
||||
cp := &Config{}
|
||||
*cp = *config
|
||||
cp.SupportedSigningAlgs = p.algorithms
|
||||
config = cp
|
||||
}
|
||||
return NewVerifier(p.issuer, p.remoteKeySet, config)
|
||||
}
|
||||
|
||||
func parseJWT(p string) ([]byte, error) {
|
||||
parts := strings.Split(p, ".")
|
||||
if len(parts) < 2 {
|
||||
return nil, fmt.Errorf("oidc: malformed jwt, expected 3 parts got %d", len(parts))
|
||||
}
|
||||
payload, err := base64.RawURLEncoding.DecodeString(parts[1])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("oidc: malformed jwt payload: %v", err)
|
||||
}
|
||||
return payload, nil
|
||||
}
|
||||
|
||||
func contains(sli []string, ele string) bool {
|
||||
for _, s := range sli {
|
||||
if s == ele {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Returns the Claims from the distributed JWT token
|
||||
func resolveDistributedClaim(ctx context.Context, verifier *IDTokenVerifier, src claimSource) ([]byte, error) {
|
||||
req, err := http.NewRequest("GET", src.Endpoint, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("malformed request: %v", err)
|
||||
}
|
||||
if src.AccessToken != "" {
|
||||
req.Header.Set("Authorization", "Bearer "+src.AccessToken)
|
||||
}
|
||||
|
||||
resp, err := doRequest(ctx, req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("oidc: Request to endpoint failed: %v", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to read response body: %v", err)
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("oidc: request failed: %v", resp.StatusCode)
|
||||
}
|
||||
|
||||
token, err := verifier.Verify(ctx, string(body))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("malformed response body: %v", err)
|
||||
}
|
||||
|
||||
return token.claims, nil
|
||||
}
|
||||
|
||||
// Verify parses a raw ID Token, verifies it's been signed by the provider, performs
|
||||
// any additional checks depending on the Config, and returns the payload.
|
||||
//
|
||||
// Verify does NOT do nonce validation, which is the callers responsibility.
|
||||
//
|
||||
// See: https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation
|
||||
//
|
||||
// oauth2Token, err := oauth2Config.Exchange(ctx, r.URL.Query().Get("code"))
|
||||
// if err != nil {
|
||||
// // handle error
|
||||
// }
|
||||
//
|
||||
// // Extract the ID Token from oauth2 token.
|
||||
// rawIDToken, ok := oauth2Token.Extra("id_token").(string)
|
||||
// if !ok {
|
||||
// // handle error
|
||||
// }
|
||||
//
|
||||
// token, err := verifier.Verify(ctx, rawIDToken)
|
||||
//
|
||||
func (v *IDTokenVerifier) Verify(ctx context.Context, rawIDToken string) (*IDToken, error) {
|
||||
// Throw out tokens with invalid claims before trying to verify the token. This lets
|
||||
// us do cheap checks before possibly re-syncing keys.
|
||||
payload, err := parseJWT(rawIDToken)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("oidc: malformed jwt: %v", err)
|
||||
}
|
||||
var token idToken
|
||||
if err := json.Unmarshal(payload, &token); err != nil {
|
||||
return nil, fmt.Errorf("oidc: failed to unmarshal claims: %v", err)
|
||||
}
|
||||
|
||||
distributedClaims := make(map[string]claimSource)
|
||||
|
||||
//step through the token to map claim names to claim sources"
|
||||
for cn, src := range token.ClaimNames {
|
||||
if src == "" {
|
||||
return nil, fmt.Errorf("oidc: failed to obtain source from claim name")
|
||||
}
|
||||
s, ok := token.ClaimSources[src]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("oidc: source does not exist")
|
||||
}
|
||||
distributedClaims[cn] = s
|
||||
}
|
||||
|
||||
t := &IDToken{
|
||||
Issuer: token.Issuer,
|
||||
Subject: token.Subject,
|
||||
Audience: []string(token.Audience),
|
||||
Expiry: time.Time(token.Expiry),
|
||||
IssuedAt: time.Time(token.IssuedAt),
|
||||
Nonce: token.Nonce,
|
||||
AccessTokenHash: token.AtHash,
|
||||
claims: payload,
|
||||
distributedClaims: distributedClaims,
|
||||
}
|
||||
|
||||
// Check issuer.
|
||||
if !v.config.SkipIssuerCheck && t.Issuer != v.issuer {
|
||||
// Google sometimes returns "accounts.google.com" as the issuer claim instead of
|
||||
// the required "https://accounts.google.com". Detect this case and allow it only
|
||||
// for Google.
|
||||
//
|
||||
// We will not add hooks to let other providers go off spec like this.
|
||||
if !(v.issuer == issuerGoogleAccounts && t.Issuer == issuerGoogleAccountsNoScheme) {
|
||||
return nil, fmt.Errorf("oidc: id token issued by a different provider, expected %q got %q", v.issuer, t.Issuer)
|
||||
}
|
||||
}
|
||||
|
||||
// If a client ID has been provided, make sure it's part of the audience. SkipClientIDCheck must be true if ClientID is empty.
|
||||
//
|
||||
// This check DOES NOT ensure that the ClientID is the party to which the ID Token was issued (i.e. Authorized party).
|
||||
if !v.config.SkipClientIDCheck {
|
||||
if v.config.ClientID != "" {
|
||||
if !contains(t.Audience, v.config.ClientID) {
|
||||
return nil, fmt.Errorf("oidc: expected audience %q got %q", v.config.ClientID, t.Audience)
|
||||
}
|
||||
} else {
|
||||
return nil, fmt.Errorf("oidc: invalid configuration, clientID must be provided or SkipClientIDCheck must be set")
|
||||
}
|
||||
}
|
||||
|
||||
// If a SkipExpiryCheck is false, make sure token is not expired.
|
||||
if !v.config.SkipExpiryCheck {
|
||||
now := time.Now
|
||||
if v.config.Now != nil {
|
||||
now = v.config.Now
|
||||
}
|
||||
nowTime := now()
|
||||
|
||||
if t.Expiry.Before(nowTime) {
|
||||
return nil, &TokenExpiredError{Expiry: t.Expiry}
|
||||
}
|
||||
|
||||
// If nbf claim is provided in token, ensure that it is indeed in the past.
|
||||
if token.NotBefore != nil {
|
||||
nbfTime := time.Time(*token.NotBefore)
|
||||
// Set to 5 minutes since this is what other OpenID Connect providers do to deal with clock skew.
|
||||
// https://github.com/AzureAD/azure-activedirectory-identitymodel-extensions-for-dotnet/blob/6.12.2/src/Microsoft.IdentityModel.Tokens/TokenValidationParameters.cs#L149-L153
|
||||
leeway := 5 * time.Minute
|
||||
|
||||
if nowTime.Add(leeway).Before(nbfTime) {
|
||||
return nil, fmt.Errorf("oidc: current time %v before the nbf (not before) time: %v", nowTime, nbfTime)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if v.config.InsecureSkipSignatureCheck {
|
||||
return t, nil
|
||||
}
|
||||
|
||||
jws, err := jose.ParseSigned(rawIDToken)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("oidc: malformed jwt: %v", err)
|
||||
}
|
||||
|
||||
switch len(jws.Signatures) {
|
||||
case 0:
|
||||
return nil, fmt.Errorf("oidc: id token not signed")
|
||||
case 1:
|
||||
default:
|
||||
return nil, fmt.Errorf("oidc: multiple signatures on id token not supported")
|
||||
}
|
||||
|
||||
sig := jws.Signatures[0]
|
||||
supportedSigAlgs := v.config.SupportedSigningAlgs
|
||||
if len(supportedSigAlgs) == 0 {
|
||||
supportedSigAlgs = []string{RS256}
|
||||
}
|
||||
|
||||
if !contains(supportedSigAlgs, sig.Header.Algorithm) {
|
||||
return nil, fmt.Errorf("oidc: id token signed with unsupported algorithm, expected %q got %q", supportedSigAlgs, sig.Header.Algorithm)
|
||||
}
|
||||
|
||||
t.sigAlgorithm = sig.Header.Algorithm
|
||||
|
||||
ctx = context.WithValue(ctx, parsedJWTKey, jws)
|
||||
gotPayload, err := v.keySet.VerifySignature(ctx, rawIDToken)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to verify signature: %v", err)
|
||||
}
|
||||
|
||||
// Ensure that the payload returned by the square/go-jose signature check actually matches the payload parsed earlier.
|
||||
if !bytes.Equal(gotPayload, payload) {
|
||||
return nil, errors.New("oidc: internal error, payload parsed did not match previous payload")
|
||||
}
|
||||
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// Nonce returns an auth code option which requires the ID Token created by the
|
||||
// OpenID Connect provider to contain the specified nonce.
|
||||
func Nonce(nonce string) oauth2.AuthCodeOption {
|
||||
return oauth2.SetAuthURLParam("nonce", nonce)
|
||||
}
|
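The Verify method above performs the whole ID-token check (issuer, audience, expiry/nbf, signature). Below is a minimal, self-contained sketch of how a client would drive it, assuming the go-oidc v3 module path; the issuer URL and client ID are hypothetical placeholders, not values from this repository.

// A hedged usage sketch of the verifier shown above.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/coreos/go-oidc/v3/oidc"
	"golang.org/x/oauth2"
)

// verifyIDToken extracts the raw id_token from an OAuth2 token and verifies it,
// exercising the issuer, audience, expiry/nbf and signature checks implemented above.
func verifyIDToken(ctx context.Context, oauth2Token *oauth2.Token) (*oidc.IDToken, error) {
	provider, err := oidc.NewProvider(ctx, "https://issuer.example.com") // hypothetical issuer
	if err != nil {
		return nil, err
	}
	verifier := provider.Verifier(&oidc.Config{ClientID: "my-client-id"}) // hypothetical client ID

	rawIDToken, ok := oauth2Token.Extra("id_token").(string)
	if !ok {
		return nil, fmt.Errorf("oauth2 token has no id_token field")
	}

	return verifier.Verify(ctx, rawIDToken)
}

func main() {
	// Wiring up the OAuth2 code exchange itself is out of scope for this sketch.
	_ = verifyIDToken
	log.Println("see verifyIDToken for the verification flow")
}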
5 vendor/github.com/go-openapi/analysis/.codecov.yml generated vendored Normal file
@@ -0,0 +1,5 @@
coverage:
  status:
    patch:
      default:
        target: 80%
2 vendor/github.com/go-openapi/analysis/.gitattributes generated vendored Normal file
@@ -0,0 +1,2 @@
*.go text eol=lf

5 vendor/github.com/go-openapi/analysis/.gitignore generated vendored Normal file
@@ -0,0 +1,5 @@
secrets.yml
coverage.out
coverage.txt
*.cov
.idea
56 vendor/github.com/go-openapi/analysis/.golangci.yml generated vendored Normal file
@@ -0,0 +1,56 @@
|
||||
linters-settings:
|
||||
govet:
|
||||
check-shadowing: true
|
||||
golint:
|
||||
min-confidence: 0
|
||||
gocyclo:
|
||||
min-complexity: 40
|
||||
gocognit:
|
||||
min-complexity: 40
|
||||
maligned:
|
||||
suggest-new: true
|
||||
dupl:
|
||||
threshold: 150
|
||||
goconst:
|
||||
min-len: 2
|
||||
min-occurrences: 4
|
||||
|
||||
linters:
|
||||
enable-all: true
|
||||
disable:
|
||||
- maligned
|
||||
- lll
|
||||
- gochecknoglobals
|
||||
- gochecknoinits
|
||||
# scopelint is useful, but also reports false positives
|
||||
# that unfortunately can't be disabled. So we disable the
|
||||
# linter rather than changing code that works.
|
||||
# see: https://github.com/kyoh86/scopelint/issues/4
|
||||
- scopelint
|
||||
- godox
|
||||
- gocognit
|
||||
#- whitespace
|
||||
- wsl
|
||||
- funlen
|
||||
- testpackage
|
||||
- wrapcheck
|
||||
#- nlreturn
|
||||
- gomnd
|
||||
- goerr113
|
||||
- exhaustivestruct
|
||||
#- errorlint
|
||||
#- nestif
|
||||
- gofumpt
|
||||
- godot
|
||||
- gci
|
||||
- dogsled
|
||||
- paralleltest
|
||||
- tparallel
|
||||
- thelper
|
||||
- ifshort
|
||||
- forbidigo
|
||||
- cyclop
|
||||
- varnamelen
|
||||
- exhaustruct
|
||||
- nonamedreturns
|
||||
- nosnakecase
|
74 vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md generated vendored Normal file
@@ -0,0 +1,74 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as
|
||||
contributors and maintainers pledge to making participation in our project and
|
||||
our community a harassment-free experience for everyone, regardless of age, body
|
||||
size, disability, ethnicity, gender identity and expression, level of experience,
|
||||
nationality, personal appearance, race, religion, or sexual identity and
|
||||
orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic
|
||||
address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies both within project spaces and in public spaces
|
||||
when an individual is representing the project or its community. Examples of
|
||||
representing a project or community include using an official project e-mail
|
||||
address, posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event. Representation of a project may be
|
||||
further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||
Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||
faith may face temporary or permanent repercussions as determined by other
|
||||
members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||
available at [http://contributor-covenant.org/version/1/4][version]
|
||||
|
||||
[homepage]: http://contributor-covenant.org
|
||||
[version]: http://contributor-covenant.org/version/1/4/
|
202 vendor/github.com/go-openapi/analysis/LICENSE generated vendored Normal file
@@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
31 vendor/github.com/go-openapi/analysis/README.md generated vendored Normal file
@@ -0,0 +1,31 @@
|
||||
# OpenAPI initiative analysis
|
||||
|
||||
[](https://travis-ci.org/go-openapi/analysis)
|
||||
[](https://ci.appveyor.com/project/casualjim/go-openapi/analysis/branch/master)
|
||||
[](https://codecov.io/gh/go-openapi/analysis)
|
||||
[](https://slackin.goswagger.io)
|
||||
[](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE)
|
||||
[](https://pkg.go.dev/github.com/go-openapi/analysis)
|
||||
[](https://goreportcard.com/report/github.com/go-openapi/analysis)
|
||||
|
||||
|
||||
A foundational library to analyze an OAI specification document for easier reasoning about the content.
|
||||
|
||||
## What's inside?
|
||||
|
||||
* An analyzer providing methods to walk the functional content of a specification
* A spec flattener producing a self-contained document bundle, while preserving `$ref`s
* A spec merger ("mixin") to merge several spec documents into a primary spec
* A spec "fixer" ensuring that response descriptions are non-empty
|
||||
|
||||
[Documentation](https://godoc.org/github.com/go-openapi/analysis)
|
||||
|
||||
## FAQ
|
||||
|
||||
* Does this library support OpenAPI 3?
|
||||
|
||||
> No.
|
||||
> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0).
|
||||
> There is no plan to make it evolve toward supporting OpenAPI 3.x.
|
||||
> This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story.
|
||||
>
|
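The README above lists a spec merger ("mixin"). As a rough, hedged illustration of how that entry point is typically called, here is a short Go sketch; the input file names are assumptions for illustration, and Mixin is taken to mutate the primary spec in place and return human-readable warnings.

// Illustrative sketch only: merge two Swagger 2.0 specs with the analysis "mixin".
package main

import (
	"encoding/json"
	"log"
	"os"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/spec"
)

func loadSpec(path string) (*spec.Swagger, error) {
	raw, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	var sw spec.Swagger
	if err := json.Unmarshal(raw, &sw); err != nil {
		return nil, err
	}
	return &sw, nil
}

func main() {
	primary, err := loadSpec("primary.json") // hypothetical input files
	if err != nil {
		log.Fatal(err)
	}
	extra, err := loadSpec("extra.json")
	if err != nil {
		log.Fatal(err)
	}

	// Mixin merges extra into primary and reports conflicts it had to skip.
	for _, warning := range analysis.Mixin(primary, extra) {
		log.Println("mixin:", warning)
	}
}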
1064 vendor/github.com/go-openapi/analysis/analyzer.go generated vendored Normal file
File diff suppressed because it is too large
32 vendor/github.com/go-openapi/analysis/appveyor.yml generated vendored Normal file
@@ -0,0 +1,32 @@
|
||||
version: "0.1.{build}"
|
||||
|
||||
clone_folder: C:\go-openapi\analysis
|
||||
shallow_clone: true # for startup speed
|
||||
pull_requests:
|
||||
do_not_increment_build_number: true
|
||||
|
||||
#skip_tags: true
|
||||
#skip_branch_with_pr: true
|
||||
|
||||
# appveyor.yml
|
||||
build: off
|
||||
|
||||
environment:
|
||||
GOPATH: c:\gopath
|
||||
|
||||
stack: go 1.16
|
||||
|
||||
test_script:
|
||||
- go test -v -timeout 20m ./...
|
||||
|
||||
deploy: off
|
||||
|
||||
notifications:
|
||||
- provider: Slack
|
||||
incoming_webhook: https://hooks.slack.com/services/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
|
||||
auth_token:
|
||||
secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
|
||||
channel: bots
|
||||
on_build_success: false
|
||||
on_build_failure: true
|
||||
on_build_status_changed: true
|
23 vendor/github.com/go-openapi/analysis/debug.go generated vendored Normal file
@@ -0,0 +1,23 @@
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package analysis
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/go-openapi/analysis/internal/debug"
|
||||
)
|
||||
|
||||
var debugLog = debug.GetLogger("analysis", os.Getenv("SWAGGER_DEBUG") != "")
|
43 vendor/github.com/go-openapi/analysis/doc.go generated vendored Normal file
@@ -0,0 +1,43 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/*
Package analysis provides methods to work with a Swagger specification document from
package go-openapi/spec.

Analyzing a specification

An analysed specification object (type Spec) provides methods to work with the swagger definition.

Flattening or expanding a specification

Flattening a specification bundles all remote $ref in the main spec document.
Depending on flattening options, additional preprocessing may take place:
  - full flattening: replacing all inline complex constructs by a named entry in #/definitions
  - expand: replace all $ref's in the document by their expanded content

Merging several specifications

Mixin several specifications merges all Swagger constructs, and warns about any conflicts found.

Fixing a specification

Unmarshalling a specification with golang json unmarshalling may lead to
some unwanted results on fields that are present but empty.

Analyzing a Swagger schema

Swagger schemas are analyzed to determine their complexity and qualify their content.
*/
package analysis
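Taking the package documentation above at face value, the following is a short, hedged sketch of the typical entry points (analysis.New plus a minimal Flatten). The input file name and the BasePath value are assumptions for illustration, not code from this repository.

// A minimal sketch of analyzing and minimally flattening a Swagger 2.0 document.
package main

import (
	"encoding/json"
	"log"
	"os"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/spec"
)

func main() {
	raw, err := os.ReadFile("swagger.json") // hypothetical input document
	if err != nil {
		log.Fatal(err)
	}

	var swagger spec.Swagger
	if err := json.Unmarshal(raw, &swagger); err != nil {
		log.Fatal(err)
	}

	// Analyze the spec: the resulting *analysis.Spec indexes $refs, schemas, etc.
	an := analysis.New(&swagger)
	log.Printf("definition references: %d", len(an.AllDefinitionReferences()))

	// Minimal flattening, as described by the Flatten doc comment further below.
	if err := analysis.Flatten(analysis.FlattenOpts{
		Spec:     an,
		BasePath: "swagger.json", // assumed location of the document
		Minimal:  true,
	}); err != nil {
		log.Fatal(err)
	}
}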
79 vendor/github.com/go-openapi/analysis/fixer.go generated vendored Normal file
@@ -0,0 +1,79 @@
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package analysis
|
||||
|
||||
import "github.com/go-openapi/spec"
|
||||
|
||||
// FixEmptyResponseDescriptions replaces empty ("") response
// descriptions in the input with "(empty)" to ensure that the
// resulting Swagger stays valid. The problem appears to arise
// from reading in valid specs that have an explicit response
// description of "" (valid, response.description is required), but
// due to zero values being omitted upon re-serializing (omitempty) we
// lose them unless we stick some chars in there.
|
||||
func FixEmptyResponseDescriptions(s *spec.Swagger) {
|
||||
for k, v := range s.Responses {
|
||||
FixEmptyDesc(&v) //#nosec
|
||||
s.Responses[k] = v
|
||||
}
|
||||
|
||||
if s.Paths == nil {
|
||||
return
|
||||
}
|
||||
|
||||
for _, v := range s.Paths.Paths {
|
||||
if v.Get != nil {
|
||||
FixEmptyDescs(v.Get.Responses)
|
||||
}
|
||||
if v.Put != nil {
|
||||
FixEmptyDescs(v.Put.Responses)
|
||||
}
|
||||
if v.Post != nil {
|
||||
FixEmptyDescs(v.Post.Responses)
|
||||
}
|
||||
if v.Delete != nil {
|
||||
FixEmptyDescs(v.Delete.Responses)
|
||||
}
|
||||
if v.Options != nil {
|
||||
FixEmptyDescs(v.Options.Responses)
|
||||
}
|
||||
if v.Head != nil {
|
||||
FixEmptyDescs(v.Head.Responses)
|
||||
}
|
||||
if v.Patch != nil {
|
||||
FixEmptyDescs(v.Patch.Responses)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FixEmptyDescs adds "(empty)" as the description for any Response in
|
||||
// the given Responses object that doesn't already have one.
|
||||
func FixEmptyDescs(rs *spec.Responses) {
|
||||
FixEmptyDesc(rs.Default)
|
||||
for k, v := range rs.StatusCodeResponses {
|
||||
FixEmptyDesc(&v) //#nosec
|
||||
rs.StatusCodeResponses[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
// FixEmptyDesc adds "(empty)" as the description to the given
|
||||
// Response object if it doesn't already have one and isn't a
|
||||
// ref. No-op on nil input.
|
||||
func FixEmptyDesc(rs *spec.Response) {
|
||||
if rs == nil || rs.Description != "" || rs.Ref.Ref.GetURL() != nil {
|
||||
return
|
||||
}
|
||||
rs.Description = "(empty)"
|
||||
}
|
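As a rough usage sketch of the fixer above (the variable names and file handling are illustrative assumptions, not code from this repository), the fixer is applied before re-serializing a spec so the omitempty round trip does not drop the required description field.

// Hedged sketch: apply the fixer, then dump the spec.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/spec"
)

func fixAndDump(swagger *spec.Swagger) ([]byte, error) {
	// Replace empty response descriptions with "(empty)" as documented above.
	analysis.FixEmptyResponseDescriptions(swagger)
	return json.MarshalIndent(swagger, "", "  ")
}

func main() {
	sw := &spec.Swagger{} // in practice, unmarshal a real document here
	out, err := fixAndDump(sw)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}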
802 vendor/github.com/go-openapi/analysis/flatten.go generated vendored Normal file
@@ -0,0 +1,802 @@
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package analysis
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"path"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/analysis/internal/flatten/normalize"
|
||||
"github.com/go-openapi/analysis/internal/flatten/operations"
|
||||
"github.com/go-openapi/analysis/internal/flatten/replace"
|
||||
"github.com/go-openapi/analysis/internal/flatten/schutils"
|
||||
"github.com/go-openapi/analysis/internal/flatten/sortref"
|
||||
"github.com/go-openapi/jsonpointer"
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
const definitionsPath = "#/definitions"
|
||||
|
||||
// newRef stores information about refs created during the flattening process
|
||||
type newRef struct {
|
||||
key string
|
||||
newName string
|
||||
path string
|
||||
isOAIGen bool
|
||||
resolved bool
|
||||
schema *spec.Schema
|
||||
parents []string
|
||||
}
|
||||
|
||||
// context stores intermediary results from flatten
|
||||
type context struct {
|
||||
newRefs map[string]*newRef
|
||||
warnings []string
|
||||
resolved map[string]string
|
||||
}
|
||||
|
||||
func newContext() *context {
|
||||
return &context{
|
||||
newRefs: make(map[string]*newRef, 150),
|
||||
warnings: make([]string, 0),
|
||||
resolved: make(map[string]string, 50),
|
||||
}
|
||||
}
|
||||
|
||||
// Flatten an analyzed spec and produce a self-contained spec bundle.
|
||||
//
|
||||
// There is a minimal and a full flattening mode.
|
||||
//
|
||||
//
|
||||
// Minimally flattening a spec means:
|
||||
// - Expanding parameters, responses, path items, parameter items and header items (references to schemas are left
|
||||
// unscathed)
|
||||
// - Importing external (http, file) references so they become internal to the document
|
||||
// - Moving every JSON pointer to a $ref to a named definition (i.e. the reworked spec does not contain pointers
|
||||
// like "$ref": "#/definitions/myObject/allOfs/1")
|
||||
//
|
||||
// A minimally flattened spec thus guarantees the following properties:
|
||||
// - all $refs point to a local definition (i.e. '#/definitions/...')
|
||||
// - definitions are unique
|
||||
//
|
||||
// NOTE: arbitrary JSON pointers (other than $refs to top level definitions) are rewritten as definitions if they
|
||||
// represent a complex schema or express commonality in the spec.
|
||||
// Otherwise, they are simply expanded.
|
||||
// Self-referencing JSON pointers cannot resolve to a type and trigger an error.
|
||||
//
|
||||
//
|
||||
// Minimal flattening is necessary and sufficient for codegen rendering using go-swagger.
|
||||
//
|
||||
// Fully flattening a spec means:
|
||||
// - Moving every complex inline schema to be a definition with an auto-generated name in a depth-first fashion.
|
||||
//
|
||||
// By complex, we mean every JSON object with some properties.
|
||||
// Arrays, when they do not define a tuple,
|
||||
// or empty objects with or without additionalProperties, are not considered complex and remain inline.
|
||||
//
|
||||
// NOTE: rewritten schemas get a vendor extension x-go-gen-location so we know from which part of the spec definitions
|
||||
// have been created.
|
||||
//
|
||||
// Available flattening options:
|
||||
// - Minimal: stops flattening after minimal $ref processing, leaving schema constructs untouched
|
||||
// - Expand: expand all $ref's in the document (inoperative if Minimal is set to true)
|
||||
// - Verbose: croaks about name conflicts detected
|
||||
// - RemoveUnused: removes unused parameters, responses and definitions after expansion/flattening
|
||||
//
|
||||
// NOTE: expansion removes all $ref save circular $ref, which remain in place
|
||||
//
|
||||
// TODO: additional options
|
||||
// - PropagateNameExtensions: ensure that created entries properly follow naming rules when their parent has set a
|
||||
// x-go-name extension
|
||||
// - LiftAllOfs:
|
||||
// - limit the flattening of allOf members when simple objects
|
||||
// - merge allOf with validation only
|
||||
// - merge allOf with extensions only
|
||||
// - ...
|
||||
//
|
||||
func Flatten(opts FlattenOpts) error {
|
||||
debugLog("FlattenOpts: %#v", opts)
|
||||
|
||||
opts.flattenContext = newContext()
|
||||
|
||||
// 1. Recursively expand responses, parameters, path items and items in simple schemas.
|
||||
//
|
||||
// This simplifies the spec and leaves only the $ref's in schema objects.
|
||||
if err := expand(&opts); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// 2. Strip the current document from absolute $ref's that are actually in the root,
|
||||
// so we can recognize them as proper definitions
|
||||
//
|
||||
// In particular, this works around issue go-openapi/spec#76: leading absolute file in $ref is stripped
|
||||
if err := normalizeRef(&opts); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// 3. Optionally remove shared parameters and responses already expanded (now unused).
|
||||
//
|
||||
// Operation parameters (i.e. under paths) remain.
|
||||
if opts.RemoveUnused {
|
||||
removeUnusedShared(&opts)
|
||||
}
|
||||
|
||||
// 4. Import all remote references.
|
||||
if err := importReferences(&opts); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// 5. full flattening: rewrite inline schemas (schemas that aren't simple types or arrays or maps)
|
||||
if !opts.Minimal && !opts.Expand {
|
||||
if err := nameInlinedSchemas(&opts); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// 6. Rewrite JSON pointers other than $ref to named definitions
|
||||
// and attempt to resolve conflicting names whenever possible.
|
||||
if err := stripPointersAndOAIGen(&opts); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// 7. Strip the spec from unused definitions
|
||||
if opts.RemoveUnused {
|
||||
removeUnused(&opts)
|
||||
}
|
||||
|
||||
// 8. Issue warning notifications, if any
|
||||
opts.croak()
|
||||
|
||||
// TODO: simplify known schema patterns to flat objects with properties
|
||||
// examples:
|
||||
// - lift simple allOf object,
|
||||
// - empty allOf with validation only or extensions only
|
||||
// - rework allOf arrays
|
||||
// - rework allOf additionalProperties
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func expand(opts *FlattenOpts) error {
|
||||
if err := spec.ExpandSpec(opts.Swagger(), opts.ExpandOpts(!opts.Expand)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
opts.Spec.reload() // re-analyze
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// normalizeRef strips the current file from any absolute file $ref. This works around issue go-openapi/spec#76:
|
||||
// leading absolute file in $ref is stripped
|
||||
func normalizeRef(opts *FlattenOpts) error {
|
||||
debugLog("normalizeRef")
|
||||
|
||||
altered := false
|
||||
for k, w := range opts.Spec.references.allRefs {
|
||||
if !strings.HasPrefix(w.String(), opts.BasePath+definitionsPath) { // may be a mix of / and \, depending on OS
|
||||
continue
|
||||
}
|
||||
|
||||
altered = true
|
||||
debugLog("stripping absolute path for: %s", w.String())
|
||||
|
||||
// strip the base path from definition
|
||||
if err := replace.UpdateRef(opts.Swagger(), k,
|
||||
spec.MustCreateRef(path.Join(definitionsPath, path.Base(w.String())))); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if altered {
|
||||
opts.Spec.reload() // re-analyze
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func removeUnusedShared(opts *FlattenOpts) {
|
||||
opts.Swagger().Parameters = nil
|
||||
opts.Swagger().Responses = nil
|
||||
|
||||
opts.Spec.reload() // re-analyze
|
||||
}
|
||||
|
||||
func importReferences(opts *FlattenOpts) error {
|
||||
var (
|
||||
imported bool
|
||||
err error
|
||||
)
|
||||
|
||||
for !imported && err == nil {
|
||||
// iteratively import remote references until none left.
|
||||
// This inlining deals with name conflicts by introducing auto-generated names ("OAIGen")
|
||||
imported, err = importExternalReferences(opts)
|
||||
|
||||
opts.Spec.reload() // re-analyze
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// nameInlinedSchemas replaces every complex inline construct by a named definition.
|
||||
func nameInlinedSchemas(opts *FlattenOpts) error {
|
||||
debugLog("nameInlinedSchemas")
|
||||
|
||||
namer := &InlineSchemaNamer{
|
||||
Spec: opts.Swagger(),
|
||||
Operations: operations.AllOpRefsByRef(opts.Spec, nil),
|
||||
flattenContext: opts.flattenContext,
|
||||
opts: opts,
|
||||
}
|
||||
|
||||
depthFirst := sortref.DepthFirst(opts.Spec.allSchemas)
|
||||
for _, key := range depthFirst {
|
||||
sch := opts.Spec.allSchemas[key]
|
||||
if sch.Schema == nil || sch.Schema.Ref.String() != "" || sch.TopLevel {
|
||||
continue
|
||||
}
|
||||
|
||||
asch, err := Schema(SchemaOpts{Schema: sch.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
|
||||
if err != nil {
|
||||
return fmt.Errorf("schema analysis [%s]: %w", key, err)
|
||||
}
|
||||
|
||||
if asch.isAnalyzedAsComplex() { // move complex schemas to definitions
|
||||
if err := namer.Name(key, sch.Schema, asch); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
opts.Spec.reload() // re-analyze
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func removeUnused(opts *FlattenOpts) {
|
||||
expected := make(map[string]struct{})
|
||||
for k := range opts.Swagger().Definitions {
|
||||
expected[path.Join(definitionsPath, jsonpointer.Escape(k))] = struct{}{}
|
||||
}
|
||||
|
||||
for _, k := range opts.Spec.AllDefinitionReferences() {
|
||||
delete(expected, k)
|
||||
}
|
||||
|
||||
for k := range expected {
|
||||
debugLog("removing unused definition %s", path.Base(k))
|
||||
if opts.Verbose {
|
||||
log.Printf("info: removing unused definition: %s", path.Base(k))
|
||||
}
|
||||
delete(opts.Swagger().Definitions, path.Base(k))
|
||||
}
|
||||
|
||||
opts.Spec.reload() // re-analyze
|
||||
}
|
||||
|
||||
func importKnownRef(entry sortref.RefRevIdx, refStr, newName string, opts *FlattenOpts) error {
|
||||
// rewrite ref with already resolved external ref (useful for cyclical refs):
|
||||
// rewrite external refs to local ones
|
||||
debugLog("resolving known ref [%s] to %s", refStr, newName)
|
||||
|
||||
for _, key := range entry.Keys {
|
||||
if err := replace.UpdateRef(opts.Swagger(), key, spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func importNewRef(entry sortref.RefRevIdx, refStr string, opts *FlattenOpts) error {
|
||||
var (
|
||||
isOAIGen bool
|
||||
newName string
|
||||
)
|
||||
|
||||
debugLog("resolving schema from remote $ref [%s]", refStr)
|
||||
|
||||
sch, err := spec.ResolveRefWithBase(opts.Swagger(), &entry.Ref, opts.ExpandOpts(false))
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not resolve schema: %w", err)
|
||||
}
|
||||
|
||||
// at this stage only $ref analysis matters
|
||||
partialAnalyzer := &Spec{
|
||||
references: referenceAnalysis{},
|
||||
patterns: patternAnalysis{},
|
||||
enums: enumAnalysis{},
|
||||
}
|
||||
partialAnalyzer.reset()
|
||||
partialAnalyzer.analyzeSchema("", sch, "/")
|
||||
|
||||
// now rewrite those refs with rebase
|
||||
for key, ref := range partialAnalyzer.references.allRefs {
|
||||
if err := replace.UpdateRef(sch, key, spec.MustCreateRef(normalize.RebaseRef(entry.Ref.String(), ref.String()))); err != nil {
|
||||
return fmt.Errorf("failed to rewrite ref for key %q at %s: %w", key, entry.Ref.String(), err)
|
||||
}
|
||||
}
|
||||
|
||||
// generate a unique name - isOAIGen means that a naming conflict was resolved by changing the name
|
||||
newName, isOAIGen = uniqifyName(opts.Swagger().Definitions, nameFromRef(entry.Ref))
|
||||
debugLog("new name for [%s]: %s - with name conflict:%t", strings.Join(entry.Keys, ", "), newName, isOAIGen)
|
||||
|
||||
opts.flattenContext.resolved[refStr] = newName
|
||||
|
||||
// rewrite the external refs to local ones
|
||||
for _, key := range entry.Keys {
|
||||
if err := replace.UpdateRef(opts.Swagger(), key,
|
||||
spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// keep track of created refs
|
||||
resolved := false
|
||||
if _, ok := opts.flattenContext.newRefs[key]; ok {
|
||||
resolved = opts.flattenContext.newRefs[key].resolved
|
||||
}
|
||||
|
||||
debugLog("keeping track of ref: %s (%s), resolved: %t", key, newName, resolved)
|
||||
opts.flattenContext.newRefs[key] = &newRef{
|
||||
key: key,
|
||||
newName: newName,
|
||||
path: path.Join(definitionsPath, newName),
|
||||
isOAIGen: isOAIGen,
|
||||
resolved: resolved,
|
||||
schema: sch,
|
||||
}
|
||||
}
|
||||
|
||||
// add the resolved schema to the definitions
|
||||
schutils.Save(opts.Swagger(), newName, sch)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// importExternalReferences iteratively digs remote references and imports them into the main schema.
|
||||
//
|
||||
// At every iteration, new remotes may be found when digging deeper: they are rebased to the current schema before being imported.
|
||||
//
|
||||
// This returns true when no more remote references can be found.
|
||||
func importExternalReferences(opts *FlattenOpts) (bool, error) {
|
||||
debugLog("importExternalReferences")
|
||||
|
||||
groupedRefs := sortref.ReverseIndex(opts.Spec.references.schemas, opts.BasePath)
|
||||
sortedRefStr := make([]string, 0, len(groupedRefs))
|
||||
if opts.flattenContext == nil {
|
||||
opts.flattenContext = newContext()
|
||||
}
|
||||
|
||||
// sort $ref resolution to ensure deterministic name conflict resolution
|
||||
for refStr := range groupedRefs {
|
||||
sortedRefStr = append(sortedRefStr, refStr)
|
||||
}
|
||||
sort.Strings(sortedRefStr)
|
||||
|
||||
complete := true
|
||||
|
||||
for _, refStr := range sortedRefStr {
|
||||
entry := groupedRefs[refStr]
|
||||
if entry.Ref.HasFragmentOnly {
|
||||
continue
|
||||
}
|
||||
|
||||
complete = false
|
||||
|
||||
newName := opts.flattenContext.resolved[refStr]
|
||||
if newName != "" {
|
||||
if err := importKnownRef(entry, refStr, newName, opts); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
// resolve schemas
|
||||
if err := importNewRef(entry, refStr, opts); err != nil {
|
||||
return false, err
|
||||
}
|
||||
}
|
||||
|
||||
// maintains ref index entries
|
||||
for k := range opts.flattenContext.newRefs {
|
||||
r := opts.flattenContext.newRefs[k]
|
||||
|
||||
// update tracking with resolved schemas
|
||||
if r.schema.Ref.String() != "" {
|
||||
ref := spec.MustCreateRef(r.path)
|
||||
sch, err := spec.ResolveRefWithBase(opts.Swagger(), &ref, opts.ExpandOpts(false))
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("could not resolve schema: %w", err)
|
||||
}
|
||||
|
||||
r.schema = sch
|
||||
}
|
||||
|
||||
if r.path == k {
|
||||
continue
|
||||
}
|
||||
|
||||
// update tracking with renamed keys: got a cascade of refs
|
||||
renamed := *r
|
||||
renamed.key = r.path
|
||||
opts.flattenContext.newRefs[renamed.path] = &renamed
|
||||
|
||||
// indirect ref
|
||||
r.newName = path.Base(k)
|
||||
r.schema = spec.RefSchema(r.path)
|
||||
r.path = k
|
||||
r.isOAIGen = strings.Contains(k, "OAIGen")
|
||||
}
|
||||
|
||||
return complete, nil
|
||||
}
|
||||
|
||||
// stripPointersAndOAIGen removes anonymous JSON pointers from spec and chain with name conflicts handler.
|
||||
// This loops until the spec has no such pointer and all name conflicts have been reduced as much as possible.
|
||||
func stripPointersAndOAIGen(opts *FlattenOpts) error {
|
||||
// name all JSON pointers to anonymous documents
|
||||
if err := namePointers(opts); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// remove unnecessary OAIGen ref (created when flattening external refs creates name conflicts)
|
||||
hasIntroducedPointerOrInline, ers := stripOAIGen(opts)
|
||||
if ers != nil {
|
||||
return ers
|
||||
}
|
||||
|
||||
// iterate as pointer or OAIGen resolution may introduce inline schemas or pointers
|
||||
for hasIntroducedPointerOrInline {
|
||||
if !opts.Minimal {
|
||||
opts.Spec.reload() // re-analyze
|
||||
if err := nameInlinedSchemas(opts); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := namePointers(opts); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// restrip and re-analyze
|
||||
var err error
|
||||
if hasIntroducedPointerOrInline, err = stripOAIGen(opts); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// stripOAIGen strips the spec from unnecessary OAIGen constructs, initially created to dedupe flattened definitions.
|
||||
//
|
||||
// A dedupe is deemed unnecessary whenever:
|
||||
// - the only conflict is with its (single) parent: OAIGen is merged into its parent (reinlining)
|
||||
// - there is a conflict with multiple parents: merge OAIGen into the first parent, then rewrite the other parents to point to
|
||||
// the first parent.
|
||||
//
|
||||
// This function returns true whenever it re-inlined a complex schema, so the caller may choose to iterate
|
||||
// pointer and name resolution again.
|
||||
func stripOAIGen(opts *FlattenOpts) (bool, error) {
|
||||
debugLog("stripOAIGen")
|
||||
replacedWithComplex := false
|
||||
|
||||
// figure out referrers of OAIGen definitions (doing it before the refs start mutating)
|
||||
for _, r := range opts.flattenContext.newRefs {
|
||||
updateRefParents(opts.Spec.references.allRefs, r)
|
||||
}
|
||||
|
||||
for k := range opts.flattenContext.newRefs {
|
||||
r := opts.flattenContext.newRefs[k]
|
||||
debugLog("newRefs[%s]: isOAIGen: %t, resolved: %t, name: %s, path:%s, #parents: %d, parents: %v, ref: %s",
|
||||
k, r.isOAIGen, r.resolved, r.newName, r.path, len(r.parents), r.parents, r.schema.Ref.String())
|
||||
|
||||
if !r.isOAIGen || len(r.parents) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
hasReplacedWithComplex, err := stripOAIGenForRef(opts, k, r)
|
||||
if err != nil {
|
||||
return replacedWithComplex, err
|
||||
}
|
||||
|
||||
replacedWithComplex = replacedWithComplex || hasReplacedWithComplex
|
||||
}
|
||||
|
||||
debugLog("replacedWithComplex: %t", replacedWithComplex)
|
||||
opts.Spec.reload() // re-analyze
|
||||
|
||||
return replacedWithComplex, nil
|
||||
}
|
||||
|
||||
// updateRefParents updates all parents of an updated $ref
|
||||
func updateRefParents(allRefs map[string]spec.Ref, r *newRef) {
|
||||
if !r.isOAIGen || r.resolved { // bail on already resolved entries (avoid looping)
|
||||
return
|
||||
}
|
||||
for k, v := range allRefs {
|
||||
if r.path != v.String() {
|
||||
continue
|
||||
}
|
||||
|
||||
found := false
|
||||
for _, p := range r.parents {
|
||||
if p == k {
|
||||
found = true
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
r.parents = append(r.parents, k)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func stripOAIGenForRef(opts *FlattenOpts, k string, r *newRef) (bool, error) {
|
||||
replacedWithComplex := false
|
||||
|
||||
pr := sortref.TopmostFirst(r.parents)
|
||||
|
||||
// rewrite first parent schema in hierarchical then lexicographical order
|
||||
debugLog("rewrite first parent %s with schema", pr[0])
|
||||
if err := replace.UpdateRefWithSchema(opts.Swagger(), pr[0], r.schema); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if pa, ok := opts.flattenContext.newRefs[pr[0]]; ok && pa.isOAIGen {
|
||||
// update parent in ref index entry
|
||||
debugLog("update parent entry: %s", pr[0])
|
||||
pa.schema = r.schema
|
||||
pa.resolved = false
|
||||
replacedWithComplex = true
|
||||
}
|
||||
|
||||
// rewrite other parents to point to first parent
|
||||
if len(pr) > 1 {
|
||||
for _, p := range pr[1:] {
|
||||
replacingRef := spec.MustCreateRef(pr[0])
|
||||
|
||||
// set complex when replacing ref is an anonymous jsonpointer: further processing may be required
|
||||
replacedWithComplex = replacedWithComplex || path.Dir(replacingRef.String()) != definitionsPath
|
||||
debugLog("rewrite parent with ref: %s", replacingRef.String())
|
||||
|
||||
// NOTE: it is possible at this stage to introduce json pointers (to non-definitions places).
|
||||
// Those are stripped later on.
|
||||
if err := replace.UpdateRef(opts.Swagger(), p, replacingRef); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if pa, ok := opts.flattenContext.newRefs[p]; ok && pa.isOAIGen {
|
||||
// update parent in ref index
|
||||
debugLog("update parent entry: %s", p)
|
||||
pa.schema = r.schema
|
||||
pa.resolved = false
|
||||
replacedWithComplex = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// remove OAIGen definition
|
||||
debugLog("removing definition %s", path.Base(r.path))
|
||||
delete(opts.Swagger().Definitions, path.Base(r.path))
|
||||
|
||||
// propagate changes in ref index for keys which have this one as a parent
|
||||
for kk, value := range opts.flattenContext.newRefs {
|
||||
if kk == k || !value.isOAIGen || value.resolved {
|
||||
continue
|
||||
}
|
||||
|
||||
found := false
|
||||
newParents := make([]string, 0, len(value.parents))
|
||||
for _, parent := range value.parents {
|
||||
switch {
|
||||
case parent == r.path:
|
||||
found = true
|
||||
parent = pr[0]
|
||||
case strings.HasPrefix(parent, r.path+"/"):
|
||||
found = true
|
||||
parent = path.Join(pr[0], strings.TrimPrefix(parent, r.path))
|
||||
}
|
||||
|
||||
newParents = append(newParents, parent)
|
||||
}
|
||||
|
||||
if found {
|
||||
value.parents = newParents
|
||||
}
|
||||
}
|
||||
|
||||
// mark naming conflict as resolved
|
||||
debugLog("marking naming conflict resolved for key: %s", r.key)
|
||||
opts.flattenContext.newRefs[r.key].isOAIGen = false
|
||||
opts.flattenContext.newRefs[r.key].resolved = true
|
||||
|
||||
// determine if the previous substitution did inline a complex schema
|
||||
if r.schema != nil && r.schema.Ref.String() == "" { // inline schema
|
||||
asch, err := Schema(SchemaOpts{Schema: r.schema, Root: opts.Swagger(), BasePath: opts.BasePath})
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
debugLog("re-inlined schema: parent: %s, %t", pr[0], asch.isAnalyzedAsComplex())
|
||||
replacedWithComplex = replacedWithComplex || !(path.Dir(pr[0]) == definitionsPath) && asch.isAnalyzedAsComplex()
|
||||
}
|
||||
|
||||
return replacedWithComplex, nil
|
||||
}
|
||||
|
||||
// namePointers replaces all JSON pointers to anonymous documents by a $ref to a new named definition.
|
||||
//
|
||||
// This is carried on depth-first. Pointers to $refs which are top level definitions are replaced by the $ref itself.
|
||||
// Pointers to simple types are expanded, unless they express commonality (i.e. several such $ref are used).
|
||||
func namePointers(opts *FlattenOpts) error {
|
||||
debugLog("name pointers")
|
||||
|
||||
refsToReplace := make(map[string]SchemaRef, len(opts.Spec.references.schemas))
|
||||
for k, ref := range opts.Spec.references.allRefs {
|
||||
if path.Dir(ref.String()) == definitionsPath {
|
||||
// this is a ref to a top-level definition: ok
|
||||
continue
|
||||
}
|
||||
|
||||
result, err := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), ref)
|
||||
if err != nil {
|
||||
return fmt.Errorf("at %s, %w", k, err)
|
||||
}
|
||||
|
||||
replacingRef := result.Ref
|
||||
sch := result.Schema
|
||||
if opts.flattenContext != nil {
|
||||
opts.flattenContext.warnings = append(opts.flattenContext.warnings, result.Warnings...)
|
||||
}
|
||||
|
||||
debugLog("planning pointer to replace at %s: %s, resolved to: %s", k, ref.String(), replacingRef.String())
|
||||
refsToReplace[k] = SchemaRef{
|
||||
Name: k, // caller
|
||||
Ref: replacingRef, // called
|
||||
Schema: sch,
|
||||
TopLevel: path.Dir(replacingRef.String()) == definitionsPath,
|
||||
}
|
||||
}
|
||||
|
||||
depthFirst := sortref.DepthFirst(refsToReplace)
|
||||
namer := &InlineSchemaNamer{
|
||||
Spec: opts.Swagger(),
|
||||
Operations: operations.AllOpRefsByRef(opts.Spec, nil),
|
||||
flattenContext: opts.flattenContext,
|
||||
opts: opts,
|
||||
}
|
||||
|
||||
for _, key := range depthFirst {
|
||||
v := refsToReplace[key]
|
||||
// update current replacement, which may have been updated by previous changes of deeper elements
|
||||
result, erd := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), v.Ref)
|
||||
if erd != nil {
|
||||
return fmt.Errorf("at %s, %w", key, erd)
|
||||
}
|
||||
|
||||
if opts.flattenContext != nil {
|
||||
opts.flattenContext.warnings = append(opts.flattenContext.warnings, result.Warnings...)
|
||||
}
|
||||
|
||||
v.Ref = result.Ref
|
||||
v.Schema = result.Schema
|
||||
v.TopLevel = path.Dir(result.Ref.String()) == definitionsPath
|
||||
debugLog("replacing pointer at %s: resolved to: %s", key, v.Ref.String())
|
||||
|
||||
if v.TopLevel {
|
||||
debugLog("replace pointer %s by canonical definition: %s", key, v.Ref.String())
|
||||
|
||||
// if the schema is a $ref to a top level definition, just rewrite the pointer to this $ref
|
||||
if err := replace.UpdateRef(opts.Swagger(), key, v.Ref); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
if err := flattenAnonPointer(key, v, refsToReplace, namer, opts); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
opts.Spec.reload() // re-analyze
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func flattenAnonPointer(key string, v SchemaRef, refsToReplace map[string]SchemaRef, namer *InlineSchemaNamer, opts *FlattenOpts) error {
|
||||
// this is a JSON pointer to an anonymous document (internal or external):
|
||||
// create a definition for this schema when:
|
||||
// - it is a complex schema
|
||||
// - or it is pointed by more than one $ref (i.e. expresses commonality)
|
||||
// otherwise, expand the pointer (single reference to a simple type)
|
||||
//
|
||||
// The named definition for this follows the target's key, not the caller's
|
||||
debugLog("namePointers at %s for %s", key, v.Ref.String())
|
||||
|
||||
// qualify the expanded schema
|
||||
asch, ers := Schema(SchemaOpts{Schema: v.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
|
||||
if ers != nil {
|
||||
return fmt.Errorf("schema analysis [%s]: %w", key, ers)
|
||||
}
|
||||
callers := make([]string, 0, 64)
|
||||
|
||||
debugLog("looking for callers")
|
||||
|
||||
an := New(opts.Swagger())
|
||||
for k, w := range an.references.allRefs {
|
||||
r, err := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), w)
|
||||
if err != nil {
|
||||
return fmt.Errorf("at %s, %w", key, err)
|
||||
}
|
||||
|
||||
if opts.flattenContext != nil {
|
||||
opts.flattenContext.warnings = append(opts.flattenContext.warnings, r.Warnings...)
|
||||
}
|
||||
|
||||
if r.Ref.String() == v.Ref.String() {
|
||||
callers = append(callers, k)
|
||||
}
|
||||
}
|
||||
|
||||
debugLog("callers for %s: %d", v.Ref.String(), len(callers))
|
||||
if len(callers) == 0 {
|
||||
// has already been updated and resolved
|
||||
return nil
|
||||
}
|
||||
|
||||
parts := sortref.KeyParts(v.Ref.String())
|
||||
debugLog("number of callers for %s: %d", v.Ref.String(), len(callers))
|
||||
|
||||
// identifying edge case when the namer did nothing because we point to a non-schema object
|
||||
// no definition is created and we expand the $ref for all callers
|
||||
if (!asch.IsSimpleSchema || len(callers) > 1) && !parts.IsSharedParam() && !parts.IsSharedResponse() {
|
||||
debugLog("replace JSON pointer at [%s] by definition: %s", key, v.Ref.String())
|
||||
if err := namer.Name(v.Ref.String(), v.Schema, asch); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// regular case: we named the $ref as a definition, and we move all callers to this new $ref
|
||||
for _, caller := range callers {
|
||||
if caller == key {
|
||||
continue
|
||||
}
|
||||
|
||||
// move $ref for next to resolve
|
||||
debugLog("identified caller of %s at [%s]", v.Ref.String(), caller)
|
||||
c := refsToReplace[caller]
|
||||
c.Ref = v.Ref
|
||||
refsToReplace[caller] = c
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
debugLog("expand JSON pointer for key=%s", key)
|
||||
|
||||
if err := replace.UpdateRefWithSchema(opts.Swagger(), key, v.Schema); err != nil {
|
||||
return err
|
||||
}
|
||||
// NOTE: there is no other caller to update
|
||||
|
||||
return nil
|
||||
}
|
293 vendor/github.com/go-openapi/analysis/flatten_name.go generated vendored Normal file
@ -0,0 +1,293 @@
|
||||
package analysis
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/analysis/internal/flatten/operations"
|
||||
"github.com/go-openapi/analysis/internal/flatten/replace"
|
||||
"github.com/go-openapi/analysis/internal/flatten/schutils"
|
||||
"github.com/go-openapi/analysis/internal/flatten/sortref"
|
||||
"github.com/go-openapi/spec"
|
||||
"github.com/go-openapi/swag"
|
||||
)
|
||||
|
||||
// InlineSchemaNamer finds a new name for an inlined type
|
||||
type InlineSchemaNamer struct {
|
||||
Spec *spec.Swagger
|
||||
Operations map[string]operations.OpRef
|
||||
flattenContext *context
|
||||
opts *FlattenOpts
|
||||
}
|
||||
|
||||
// Name yields a new name for the inline schema
|
||||
func (isn *InlineSchemaNamer) Name(key string, schema *spec.Schema, aschema *AnalyzedSchema) error {
|
||||
debugLog("naming inlined schema at %s", key)
|
||||
|
||||
parts := sortref.KeyParts(key)
|
||||
for _, name := range namesFromKey(parts, aschema, isn.Operations) {
|
||||
if name == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
// create unique name
|
||||
newName, isOAIGen := uniqifyName(isn.Spec.Definitions, swag.ToJSONName(name))
|
||||
|
||||
// clone schema
|
||||
sch := schutils.Clone(schema)
|
||||
|
||||
// replace values on schema
|
||||
if err := replace.RewriteSchemaToRef(isn.Spec, key,
|
||||
spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
|
||||
return fmt.Errorf("error while creating definition %q from inline schema: %w", newName, err)
|
||||
}
|
||||
|
||||
// rewrite any dependent $ref pointing to this place,
|
||||
// when not already pointing to a top-level definition.
|
||||
//
|
||||
// NOTE: this is important if such referrers use arbitrary JSON pointers.
|
||||
an := New(isn.Spec)
|
||||
for k, v := range an.references.allRefs {
|
||||
r, erd := replace.DeepestRef(isn.opts.Swagger(), isn.opts.ExpandOpts(false), v)
|
||||
if erd != nil {
|
||||
return fmt.Errorf("at %s, %w", k, erd)
|
||||
}
|
||||
|
||||
if isn.opts.flattenContext != nil {
|
||||
isn.opts.flattenContext.warnings = append(isn.opts.flattenContext.warnings, r.Warnings...)
|
||||
}
|
||||
|
||||
if r.Ref.String() != key && (r.Ref.String() != path.Join(definitionsPath, newName) || path.Dir(v.String()) == definitionsPath) {
|
||||
continue
|
||||
}
|
||||
|
||||
debugLog("found a $ref to a rewritten schema: %s points to %s", k, v.String())
|
||||
|
||||
// rewrite $ref to the new target
|
||||
if err := replace.UpdateRef(isn.Spec, k,
|
||||
spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: this extension is currently not used by go-swagger (provided for information only)
|
||||
sch.AddExtension("x-go-gen-location", GenLocation(parts))
|
||||
|
||||
// save cloned schema to definitions
|
||||
schutils.Save(isn.Spec, newName, sch)
|
||||
|
||||
// keep track of created refs
|
||||
if isn.flattenContext == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
debugLog("track created ref: key=%s, newName=%s, isOAIGen=%t", key, newName, isOAIGen)
|
||||
resolved := false
|
||||
|
||||
if _, ok := isn.flattenContext.newRefs[key]; ok {
|
||||
resolved = isn.flattenContext.newRefs[key].resolved
|
||||
}
|
||||
|
||||
isn.flattenContext.newRefs[key] = &newRef{
|
||||
key: key,
|
||||
newName: newName,
|
||||
path: path.Join(definitionsPath, newName),
|
||||
isOAIGen: isOAIGen,
|
||||
resolved: resolved,
|
||||
schema: sch,
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// uniqifyName yields a unique name for a definition
|
||||
func uniqifyName(definitions spec.Definitions, name string) (string, bool) {
|
||||
isOAIGen := false
|
||||
if name == "" {
|
||||
name = "oaiGen"
|
||||
isOAIGen = true
|
||||
}
|
||||
|
||||
if len(definitions) == 0 {
|
||||
return name, isOAIGen
|
||||
}
|
||||
|
||||
unq := true
|
||||
for k := range definitions {
|
||||
if strings.EqualFold(k, name) {
|
||||
unq = false
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if unq {
|
||||
return name, isOAIGen
|
||||
}
|
||||
|
||||
name += "OAIGen"
|
||||
isOAIGen = true
|
||||
var idx int
|
||||
unique := name
|
||||
_, known := definitions[unique]
|
||||
|
||||
for known {
|
||||
idx++
|
||||
unique = fmt.Sprintf("%s%d", name, idx)
|
||||
_, known = definitions[unique]
|
||||
}
|
||||
|
||||
return unique, isOAIGen
|
||||
}
|
||||
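
For reference, a minimal sketch of how the collision handling above plays out, written as a hypothetical in-package test (the definition names are made up; not part of the vendored code):

package analysis

import (
	"testing"

	"github.com/go-openapi/spec"
)

// TestUniqifyNameSketch illustrates the "OAIGen" suffixing behaviour described above.
func TestUniqifyNameSketch(t *testing.T) {
	defs := spec.Definitions{"record": spec.Schema{}, "recordOAIGen": spec.Schema{}}

	// no clash: the proposed name is returned unchanged
	if name, isOAIGen := uniqifyName(defs, "item"); name != "item" || isOAIGen {
		t.Fatalf("unexpected result: %s %t", name, isOAIGen)
	}

	// clash on "record": suffix with OAIGen, then with an index until unique
	if name, _ := uniqifyName(defs, "record"); name != "recordOAIGen1" {
		t.Fatalf("unexpected result: %s", name)
	}
}
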
|
||||
func namesFromKey(parts sortref.SplitKey, aschema *AnalyzedSchema, operations map[string]operations.OpRef) []string {
|
||||
var (
|
||||
baseNames [][]string
|
||||
startIndex int
|
||||
)
|
||||
|
||||
if parts.IsOperation() {
|
||||
baseNames, startIndex = namesForOperation(parts, operations)
|
||||
}
|
||||
|
||||
// definitions
|
||||
if parts.IsDefinition() {
|
||||
baseNames, startIndex = namesForDefinition(parts)
|
||||
}
|
||||
|
||||
result := make([]string, 0, len(baseNames))
|
||||
for _, segments := range baseNames {
|
||||
nm := parts.BuildName(segments, startIndex, partAdder(aschema))
|
||||
if nm == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
result = append(result, nm)
|
||||
}
|
||||
sort.Strings(result)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func namesForParam(parts sortref.SplitKey, operations map[string]operations.OpRef) ([][]string, int) {
|
||||
var (
|
||||
baseNames [][]string
|
||||
startIndex int
|
||||
)
|
||||
|
||||
piref := parts.PathItemRef()
|
||||
if piref.String() != "" && parts.IsOperationParam() {
|
||||
if op, ok := operations[piref.String()]; ok {
|
||||
startIndex = 5
|
||||
baseNames = append(baseNames, []string{op.ID, "params", "body"})
|
||||
}
|
||||
} else if parts.IsSharedOperationParam() {
|
||||
pref := parts.PathRef()
|
||||
for k, v := range operations {
|
||||
if strings.HasPrefix(k, pref.String()) {
|
||||
startIndex = 4
|
||||
baseNames = append(baseNames, []string{v.ID, "params", "body"})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return baseNames, startIndex
|
||||
}
|
||||
|
||||
func namesForOperation(parts sortref.SplitKey, operations map[string]operations.OpRef) ([][]string, int) {
|
||||
var (
|
||||
baseNames [][]string
|
||||
startIndex int
|
||||
)
|
||||
|
||||
// params
|
||||
if parts.IsOperationParam() || parts.IsSharedOperationParam() {
|
||||
baseNames, startIndex = namesForParam(parts, operations)
|
||||
}
|
||||
|
||||
// responses
|
||||
if parts.IsOperationResponse() {
|
||||
piref := parts.PathItemRef()
|
||||
if piref.String() != "" {
|
||||
if op, ok := operations[piref.String()]; ok {
|
||||
startIndex = 6
|
||||
baseNames = append(baseNames, []string{op.ID, parts.ResponseName(), "body"})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return baseNames, startIndex
|
||||
}
|
||||
|
||||
func namesForDefinition(parts sortref.SplitKey) ([][]string, int) {
|
||||
nm := parts.DefinitionName()
|
||||
if nm != "" {
|
||||
return [][]string{{parts.DefinitionName()}}, 2
|
||||
}
|
||||
|
||||
return [][]string{}, 0
|
||||
}
|
||||
|
||||
// partAdder knows how to interpret a schema when it comes to building a name from parts
|
||||
func partAdder(aschema *AnalyzedSchema) sortref.PartAdder {
|
||||
return func(part string) []string {
|
||||
segments := make([]string, 0, 2)
|
||||
|
||||
if part == "items" || part == "additionalItems" {
|
||||
if aschema.IsTuple || aschema.IsTupleWithExtra {
|
||||
segments = append(segments, "tuple")
|
||||
} else {
|
||||
segments = append(segments, "items")
|
||||
}
|
||||
|
||||
if part == "additionalItems" {
|
||||
segments = append(segments, part)
|
||||
}
|
||||
|
||||
return segments
|
||||
}
|
||||
|
||||
segments = append(segments, part)
|
||||
|
||||
return segments
|
||||
}
|
||||
}
|
||||
|
||||
func nameFromRef(ref spec.Ref) string {
|
||||
u := ref.GetURL()
|
||||
if u.Fragment != "" {
|
||||
return swag.ToJSONName(path.Base(u.Fragment))
|
||||
}
|
||||
|
||||
if u.Path != "" {
|
||||
bn := path.Base(u.Path)
|
||||
if bn != "" && bn != "/" {
|
||||
ext := path.Ext(bn)
|
||||
if ext != "" {
|
||||
return swag.ToJSONName(bn[:len(bn)-len(ext)])
|
||||
}
|
||||
|
||||
return swag.ToJSONName(bn)
|
||||
}
|
||||
}
|
||||
|
||||
return swag.ToJSONName(strings.ReplaceAll(u.Host, ".", " "))
|
||||
}
|
||||
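
A rough sketch of the names this helper derives, as hypothetical in-package usage (the refs are illustrative; expected values follow from the logic above rather than from a run):

package analysis

import "github.com/go-openapi/spec"

func exampleNameFromRef() []string {
	return []string{
		// fragment: base segment of the JSON pointer, JSON-ified
		nameFromRef(spec.MustCreateRef("#/definitions/my-model")), // expected "myModel"
		// file path: base name without extension, JSON-ified
		nameFromRef(spec.MustCreateRef("./schemas/pet-store.json")), // expected "petStore"
	}
}
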
|
||||
// GenLocation indicates from which section of the specification (models or operations) a definition has been created.
|
||||
//
|
||||
// This is reflected in the output spec with a "x-go-gen-location" extension. At the moment, this is provided
|
||||
// for information only.
|
||||
func GenLocation(parts sortref.SplitKey) string {
|
||||
switch {
|
||||
case parts.IsOperation():
|
||||
return "operations"
|
||||
case parts.IsDefinition():
|
||||
return "models"
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
78 vendor/github.com/go-openapi/analysis/flatten_options.go generated vendored Normal file
@ -0,0 +1,78 @@
|
||||
package analysis
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
// FlattenOpts configuration for flattening a swagger specification.
|
||||
//
|
||||
// The BasePath parameter is used to locate remote relative $ref found in the specification.
|
||||
// This path is a file: it points to the location of the root document and may be either a local
|
||||
// file path or a URL.
|
||||
//
|
||||
// If none specified, relative references (e.g. "$ref": "folder/schema.yaml#/definitions/...")
|
||||
// found in the spec are searched from the current working directory.
|
||||
type FlattenOpts struct {
|
||||
Spec *Spec // The analyzed spec to work with
|
||||
flattenContext *context // Internal context to track flattening activity
|
||||
|
||||
BasePath string // The location of the root document for this spec to resolve relative $ref
|
||||
|
||||
// Flattening options
|
||||
Expand bool // When true, skip flattening the spec and expand it instead (if Minimal is false)
|
||||
Minimal bool // When true, do not decompose complex structures such as allOf
|
||||
Verbose bool // enable some reporting on possible name conflicts detected
|
||||
RemoveUnused bool // When true, remove unused parameters, responses and definitions after expansion/flattening
|
||||
ContinueOnError bool // Continue when spec expansion issues are found
|
||||
|
||||
/* Extra keys */
|
||||
_ struct{} // require keys
|
||||
}
|
||||
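
A minimal sketch of how a caller typically fills these options, assuming the spec is loaded with github.com/go-openapi/loads (which is not part of this vendor tree; the file name is illustrative):

package main

import (
	"log"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/loads"
)

func main() {
	doc, err := loads.Spec("swagger.yaml") // hypothetical input file
	if err != nil {
		log.Fatal(err)
	}

	// flatten in "minimal" mode: name inlined schemas, keep allOf constructs as-is
	if err := analysis.Flatten(analysis.FlattenOpts{
		Spec:     analysis.New(doc.Spec()),
		BasePath: doc.SpecFilePath(),
		Minimal:  true,
	}); err != nil {
		log.Fatal(err)
	}
}
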
|
||||
// ExpandOpts creates a spec.ExpandOptions to configure expanding a specification document.
|
||||
func (f *FlattenOpts) ExpandOpts(skipSchemas bool) *spec.ExpandOptions {
|
||||
return &spec.ExpandOptions{
|
||||
RelativeBase: f.BasePath,
|
||||
SkipSchemas: skipSchemas,
|
||||
ContinueOnError: f.ContinueOnError,
|
||||
}
|
||||
}
|
||||
|
||||
// Swagger gets the swagger specification for this flatten operation
|
||||
func (f *FlattenOpts) Swagger() *spec.Swagger {
|
||||
return f.Spec.spec
|
||||
}
|
||||
|
||||
// croak logs notifications and warnings about valid, but possibly unwanted constructs resulting
|
||||
// from flattening a spec
|
||||
func (f *FlattenOpts) croak() {
|
||||
if !f.Verbose {
|
||||
return
|
||||
}
|
||||
|
||||
reported := make(map[string]bool, len(f.flattenContext.newRefs))
|
||||
for _, v := range f.Spec.references.allRefs {
|
||||
// warns about duplicate handling
|
||||
for _, r := range f.flattenContext.newRefs {
|
||||
if r.isOAIGen && r.path == v.String() {
|
||||
reported[r.newName] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for k := range reported {
|
||||
log.Printf("warning: duplicate flattened definition name resolved as %s", k)
|
||||
}
|
||||
|
||||
// warns about possible type mismatches
|
||||
uniqueMsg := make(map[string]bool)
|
||||
for _, msg := range f.flattenContext.warnings {
|
||||
if _, ok := uniqueMsg[msg]; ok {
|
||||
continue
|
||||
}
|
||||
log.Printf("warning: %s", msg)
|
||||
uniqueMsg[msg] = true
|
||||
}
|
||||
}
|
41 vendor/github.com/go-openapi/analysis/internal/debug/debug.go generated vendored Normal file
@ -0,0 +1,41 @@
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package debug
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
)
|
||||
|
||||
var (
|
||||
output = os.Stdout
|
||||
)
|
||||
|
||||
// GetLogger provides a prefix debug logger
|
||||
func GetLogger(prefix string, debug bool) func(string, ...interface{}) {
|
||||
if debug {
|
||||
logger := log.New(output, fmt.Sprintf("%s:", prefix), log.LstdFlags)
|
||||
|
||||
return func(msg string, args ...interface{}) {
|
||||
_, file1, pos1, _ := runtime.Caller(1)
|
||||
logger.Printf("%s:%d: %s", filepath.Base(file1), pos1, fmt.Sprintf(msg, args...))
|
||||
}
|
||||
}
|
||||
|
||||
return func(msg string, args ...interface{}) {}
|
||||
}
|
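
The flatten sub-packages wire this logger up the same way replace.go does further below, keying debug output off the SWAGGER_DEBUG environment variable; a sketch for a hypothetical sibling package:

package somepkg // hypothetical package inside this module (internal/debug is not importable from outside)

import (
	"os"

	"github.com/go-openapi/analysis/internal/debug"
)

// debugLog is a no-op unless SWAGGER_DEBUG is set in the environment.
var debugLog = debug.GetLogger("analysis/flatten/somepkg", os.Getenv("SWAGGER_DEBUG") != "")

func doWork() {
	debugLog("working on %q", "some key") // printed with a file:line prefix when enabled
}
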
87 vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go generated vendored Normal file
@ -0,0 +1,87 @@
|
||||
package normalize
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
// RebaseRef rebases a remote ref relative to a base ref.
|
||||
//
|
||||
// NOTE: does not support JSONschema ID for $ref (we assume we are working with swagger specs here).
|
||||
//
|
||||
// NOTE(windows):
|
||||
// * refs are assumed to have been normalized with drive letter lower cased (from go-openapi/spec)
|
||||
// * "/ in paths may appear as escape sequences
|
||||
func RebaseRef(baseRef string, ref string) string {
|
||||
baseRef, _ = url.PathUnescape(baseRef)
|
||||
ref, _ = url.PathUnescape(ref)
|
||||
|
||||
if baseRef == "" || baseRef == "." || strings.HasPrefix(baseRef, "#") {
|
||||
return ref
|
||||
}
|
||||
|
||||
parts := strings.Split(ref, "#")
|
||||
|
||||
baseParts := strings.Split(baseRef, "#")
|
||||
baseURL, _ := url.Parse(baseParts[0])
|
||||
if strings.HasPrefix(ref, "#") {
|
||||
if baseURL.Host == "" {
|
||||
return strings.Join([]string{baseParts[0], parts[1]}, "#")
|
||||
}
|
||||
|
||||
return strings.Join([]string{baseParts[0], parts[1]}, "#")
|
||||
}
|
||||
|
||||
refURL, _ := url.Parse(parts[0])
|
||||
if refURL.Host != "" || filepath.IsAbs(parts[0]) {
|
||||
// not rebasing an absolute path
|
||||
return ref
|
||||
}
|
||||
|
||||
// there is a relative path
|
||||
var basePath string
|
||||
if baseURL.Host != "" {
|
||||
// when there is a host, standard URI rules apply (with "/")
|
||||
baseURL.Path = path.Dir(baseURL.Path)
|
||||
baseURL.Path = path.Join(baseURL.Path, "/"+parts[0])
|
||||
|
||||
return baseURL.String()
|
||||
}
|
||||
|
||||
// this is a local relative path
|
||||
// basePart[0] and parts[0] are local filesystem directories/files
|
||||
basePath = filepath.Dir(baseParts[0])
|
||||
relPath := filepath.Join(basePath, string(filepath.Separator)+parts[0])
|
||||
if len(parts) > 1 {
|
||||
return strings.Join([]string{relPath, parts[1]}, "#")
|
||||
}
|
||||
|
||||
return relPath
|
||||
}
|
||||
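
A sketch of what the rebasing above yields for a local relative $ref and for a fragment-only $ref (paths are illustrative; shown from inside package analysis since normalize is internal; expected values are traced from the code, not executed):

package analysis

import (
	"fmt"

	"github.com/go-openapi/analysis/internal/flatten/normalize"
)

func exampleRebaseRef() {
	// a $ref relative to the base document is re-anchored next to that document
	fmt.Println(normalize.RebaseRef("/tmp/spec/swagger.yaml", "./definitions.yaml#/definitions/record"))
	// expected: /tmp/spec/definitions.yaml#/definitions/record

	// a fragment-only $ref stays attached to the base document
	fmt.Println(normalize.RebaseRef("/tmp/spec/swagger.yaml", "#/definitions/record"))
	// expected: /tmp/spec/swagger.yaml#/definitions/record
}
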
|
||||
// Path renders absolute path on remote file refs
|
||||
//
|
||||
// NOTE(windows):
|
||||
// * refs are assumed to have been normalized with drive letter lower cased (from go-openapi/spec)
|
||||
// * "/ in paths may appear as escape sequences
|
||||
func Path(ref spec.Ref, basePath string) string {
|
||||
uri, _ := url.PathUnescape(ref.String())
|
||||
if ref.HasFragmentOnly || filepath.IsAbs(uri) {
|
||||
return uri
|
||||
}
|
||||
|
||||
refURL, _ := url.Parse(uri)
|
||||
if refURL.Host != "" {
|
||||
return uri
|
||||
}
|
||||
|
||||
parts := strings.Split(uri, "#")
|
||||
// BasePath, parts[0] are local filesystem directories, guaranteed to be absolute at this stage
|
||||
parts[0] = filepath.Join(filepath.Dir(basePath), parts[0])
|
||||
|
||||
return strings.Join(parts, "#")
|
||||
}
|
90 vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go generated vendored Normal file
@ -0,0 +1,90 @@
|
||||
package operations
|
||||
|
||||
import (
|
||||
"path"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/jsonpointer"
|
||||
"github.com/go-openapi/spec"
|
||||
"github.com/go-openapi/swag"
|
||||
)
|
||||
|
||||
// AllOpRefsByRef returns an index of sortable operations
|
||||
func AllOpRefsByRef(specDoc Provider, operationIDs []string) map[string]OpRef {
|
||||
return OpRefsByRef(GatherOperations(specDoc, operationIDs))
|
||||
}
|
||||
|
||||
// OpRefsByRef indexes a map of sortable operations
|
||||
func OpRefsByRef(oprefs map[string]OpRef) map[string]OpRef {
|
||||
result := make(map[string]OpRef, len(oprefs))
|
||||
for _, v := range oprefs {
|
||||
result[v.Ref.String()] = v
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// OpRef is an indexable, sortable operation
|
||||
type OpRef struct {
|
||||
Method string
|
||||
Path string
|
||||
Key string
|
||||
ID string
|
||||
Op *spec.Operation
|
||||
Ref spec.Ref
|
||||
}
|
||||
|
||||
// OpRefs is a sortable collection of operations
|
||||
type OpRefs []OpRef
|
||||
|
||||
func (o OpRefs) Len() int { return len(o) }
|
||||
func (o OpRefs) Swap(i, j int) { o[i], o[j] = o[j], o[i] }
|
||||
func (o OpRefs) Less(i, j int) bool { return o[i].Key < o[j].Key }
|
||||
|
||||
// Provider knows how to collect operations from a spec
|
||||
type Provider interface {
|
||||
Operations() map[string]map[string]*spec.Operation
|
||||
}
|
||||
|
||||
// GatherOperations builds a map of sorted operations from a spec
|
||||
func GatherOperations(specDoc Provider, operationIDs []string) map[string]OpRef {
|
||||
var oprefs OpRefs
|
||||
|
||||
for method, pathItem := range specDoc.Operations() {
|
||||
for pth, operation := range pathItem {
|
||||
vv := *operation
|
||||
oprefs = append(oprefs, OpRef{
|
||||
Key: swag.ToGoName(strings.ToLower(method) + " " + pth),
|
||||
Method: method,
|
||||
Path: pth,
|
||||
ID: vv.ID,
|
||||
Op: &vv,
|
||||
Ref: spec.MustCreateRef("#" + path.Join("/paths", jsonpointer.Escape(pth), method)),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
sort.Sort(oprefs)
|
||||
|
||||
operations := make(map[string]OpRef)
|
||||
for _, opr := range oprefs {
|
||||
nm := opr.ID
|
||||
if nm == "" {
|
||||
nm = opr.Key
|
||||
}
|
||||
|
||||
oo, found := operations[nm]
|
||||
if found && oo.Method != opr.Method && oo.Path != opr.Path {
|
||||
nm = opr.Key
|
||||
}
|
||||
|
||||
if len(operationIDs) == 0 || swag.ContainsStrings(operationIDs, opr.ID) || swag.ContainsStrings(operationIDs, nm) {
|
||||
opr.ID = nm
|
||||
opr.Op.ID = nm
|
||||
operations[nm] = opr
|
||||
}
|
||||
}
|
||||
|
||||
return operations
|
||||
}
|
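
For orientation, a sketch of how the flattener consumes this index, mirroring the call in flatten_name.go above; keys are operation $ref strings and values carry the resolved operation ID (exampleOpIndex is a hypothetical helper inside package analysis):

package analysis

import (
	"fmt"

	"github.com/go-openapi/analysis/internal/flatten/operations"
	"github.com/go-openapi/spec"
)

// exampleOpIndex builds the operation index the inline schema namer consumes.
func exampleOpIndex(sp *spec.Swagger) {
	an := New(sp) // *Spec satisfies operations.Provider via its Operations() method
	for ref, op := range operations.AllOpRefsByRef(an, nil) { // nil: do not filter by operation ID
		fmt.Printf("%s %s (id %s) indexed under %s\n", op.Method, op.Path, op.ID, ref)
	}
}
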
434 vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go generated vendored Normal file
@ -0,0 +1,434 @@
|
||||
package replace
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"strconv"
|
||||
|
||||
"github.com/go-openapi/analysis/internal/debug"
|
||||
"github.com/go-openapi/jsonpointer"
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
const definitionsPath = "#/definitions"
|
||||
|
||||
var debugLog = debug.GetLogger("analysis/flatten/replace", os.Getenv("SWAGGER_DEBUG") != "")
|
||||
|
||||
// RewriteSchemaToRef replaces a schema with a Ref
|
||||
func RewriteSchemaToRef(sp *spec.Swagger, key string, ref spec.Ref) error {
|
||||
debugLog("rewriting schema to ref for %s with %s", key, ref.String())
|
||||
_, value, err := getPointerFromKey(sp, key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
switch refable := value.(type) {
|
||||
case *spec.Schema:
|
||||
return rewriteParentRef(sp, key, ref)
|
||||
|
||||
case spec.Schema:
|
||||
return rewriteParentRef(sp, key, ref)
|
||||
|
||||
case *spec.SchemaOrArray:
|
||||
if refable.Schema != nil {
|
||||
refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
}
|
||||
|
||||
case *spec.SchemaOrBool:
|
||||
if refable.Schema != nil {
|
||||
refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("no schema with ref found at %s for %T", key, value)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func rewriteParentRef(sp *spec.Swagger, key string, ref spec.Ref) error {
|
||||
parent, entry, pvalue, err := getParentFromKey(sp, key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
debugLog("rewriting holder for %T", pvalue)
|
||||
switch container := pvalue.(type) {
|
||||
case spec.Response:
|
||||
if err := rewriteParentRef(sp, "#"+parent, ref); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case *spec.Response:
|
||||
container.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
|
||||
case *spec.Responses:
|
||||
statusCode, err := strconv.Atoi(entry)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s not a number: %w", key[1:], err)
|
||||
}
|
||||
resp := container.StatusCodeResponses[statusCode]
|
||||
resp.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
container.StatusCodeResponses[statusCode] = resp
|
||||
|
||||
case map[string]spec.Response:
|
||||
resp := container[entry]
|
||||
resp.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
container[entry] = resp
|
||||
|
||||
case spec.Parameter:
|
||||
if err := rewriteParentRef(sp, "#"+parent, ref); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case map[string]spec.Parameter:
|
||||
param := container[entry]
|
||||
param.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
container[entry] = param
|
||||
|
||||
case []spec.Parameter:
|
||||
idx, err := strconv.Atoi(entry)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s not a number: %w", key[1:], err)
|
||||
}
|
||||
param := container[idx]
|
||||
param.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
container[idx] = param
|
||||
|
||||
case spec.Definitions:
|
||||
container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
|
||||
case map[string]spec.Schema:
|
||||
container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
|
||||
case []spec.Schema:
|
||||
idx, err := strconv.Atoi(entry)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s not a number: %w", key[1:], err)
|
||||
}
|
||||
container[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
|
||||
case *spec.SchemaOrArray:
|
||||
// NOTE: this is necessarily an array - otherwise, the parent would be *Schema
|
||||
idx, err := strconv.Atoi(entry)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s not a number: %w", key[1:], err)
|
||||
}
|
||||
container.Schemas[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
|
||||
case spec.SchemaProperties:
|
||||
container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
|
||||
// NOTE: can't have case *spec.SchemaOrBool - the parent in this case is *Schema
|
||||
|
||||
default:
|
||||
return fmt.Errorf("unhandled parent schema rewrite %s (%T)", key, pvalue)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// getPointerFromKey retrieves the content of the JSON pointer "key"
|
||||
func getPointerFromKey(sp interface{}, key string) (string, interface{}, error) {
|
||||
switch sp.(type) {
|
||||
case *spec.Schema:
|
||||
case *spec.Swagger:
|
||||
default:
|
||||
panic("unexpected type used in getPointerFromKey")
|
||||
}
|
||||
if key == "#/" {
|
||||
return "", sp, nil
|
||||
}
|
||||
// unescape chars in key, e.g. "{}" from path params
|
||||
pth, _ := url.PathUnescape(key[1:])
|
||||
ptr, err := jsonpointer.New(pth)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
value, _, err := ptr.Get(sp)
|
||||
if err != nil {
|
||||
debugLog("error when getting key: %s with path: %s", key, pth)
|
||||
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
return pth, value, nil
|
||||
}
|
||||
|
||||
// getParentFromKey retrieves the container of the JSON pointer "key"
|
||||
func getParentFromKey(sp interface{}, key string) (string, string, interface{}, error) {
|
||||
switch sp.(type) {
|
||||
case *spec.Schema:
|
||||
case *spec.Swagger:
|
||||
default:
|
||||
panic("unexpected type used in getPointerFromKey")
|
||||
}
|
||||
// unescape chars in key, e.g. "{}" from path params
|
||||
pth, _ := url.PathUnescape(key[1:])
|
||||
|
||||
parent, entry := path.Dir(pth), path.Base(pth)
|
||||
debugLog("getting schema holder at: %s, with entry: %s", parent, entry)
|
||||
|
||||
pptr, err := jsonpointer.New(parent)
|
||||
if err != nil {
|
||||
return "", "", nil, err
|
||||
}
|
||||
pvalue, _, err := pptr.Get(sp)
|
||||
if err != nil {
|
||||
return "", "", nil, fmt.Errorf("can't get parent for %s: %w", parent, err)
|
||||
}
|
||||
|
||||
return parent, entry, pvalue, nil
|
||||
}
|
||||
|
||||
// UpdateRef replaces a ref by another one
|
||||
func UpdateRef(sp interface{}, key string, ref spec.Ref) error {
|
||||
switch sp.(type) {
|
||||
case *spec.Schema:
|
||||
case *spec.Swagger:
|
||||
default:
|
||||
panic("unexpected type used in getPointerFromKey")
|
||||
}
|
||||
debugLog("updating ref for %s with %s", key, ref.String())
|
||||
pth, value, err := getPointerFromKey(sp, key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
switch refable := value.(type) {
|
||||
case *spec.Schema:
|
||||
refable.Ref = ref
|
||||
case *spec.SchemaOrArray:
|
||||
if refable.Schema != nil {
|
||||
refable.Schema.Ref = ref
|
||||
}
|
||||
case *spec.SchemaOrBool:
|
||||
if refable.Schema != nil {
|
||||
refable.Schema.Ref = ref
|
||||
}
|
||||
case spec.Schema:
|
||||
debugLog("rewriting holder for %T", refable)
|
||||
_, entry, pvalue, erp := getParentFromKey(sp, key)
|
||||
if erp != nil {
|
||||
return erp
|
||||
}
|
||||
switch container := pvalue.(type) {
|
||||
case spec.Definitions:
|
||||
container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
|
||||
case map[string]spec.Schema:
|
||||
container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
|
||||
case []spec.Schema:
|
||||
idx, err := strconv.Atoi(entry)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s not a number: %w", pth, err)
|
||||
}
|
||||
container[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
|
||||
case *spec.SchemaOrArray:
|
||||
// NOTE: this is necessarily an array - otherwise, the parent would be *Schema
|
||||
idx, err := strconv.Atoi(entry)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s not a number: %w", pth, err)
|
||||
}
|
||||
container.Schemas[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
|
||||
case spec.SchemaProperties:
|
||||
container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
|
||||
|
||||
// NOTE: can't have case *spec.SchemaOrBool - the parent in this case is *Schema
|
||||
|
||||
default:
|
||||
return fmt.Errorf("unhandled container type at %s: %T", key, value)
|
||||
}
|
||||
|
||||
default:
|
||||
return fmt.Errorf("no schema with ref found at %s for %T", key, value)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
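
A sketch of the kind of rewrite UpdateRef performs, from inside package analysis since replace is internal (the JSON pointer and target definition are hypothetical):

package analysis

import (
	"github.com/go-openapi/analysis/internal/flatten/replace"
	"github.com/go-openapi/spec"
)

// redirectRef points the schema found at the given JSON pointer to a top-level definition.
func redirectRef(sp *spec.Swagger) error {
	// e.g. make the body parameter of an operation reference #/definitions/record
	// instead of whatever nested $ref it currently holds (pointer is illustrative)
	return replace.UpdateRef(sp,
		"#/paths/~1records/post/parameters/0/schema",
		spec.MustCreateRef("#/definitions/record"))
}
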
|
||||
// UpdateRefWithSchema replaces a ref with a schema (i.e. re-inline schema)
|
||||
func UpdateRefWithSchema(sp *spec.Swagger, key string, sch *spec.Schema) error {
|
||||
debugLog("updating ref for %s with schema", key)
|
||||
pth, value, err := getPointerFromKey(sp, key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
switch refable := value.(type) {
|
||||
case *spec.Schema:
|
||||
*refable = *sch
|
||||
case spec.Schema:
|
||||
_, entry, pvalue, erp := getParentFromKey(sp, key)
|
||||
if erp != nil {
|
||||
return erp
|
||||
}
|
||||
switch container := pvalue.(type) {
|
||||
case spec.Definitions:
|
||||
container[entry] = *sch
|
||||
|
||||
case map[string]spec.Schema:
|
||||
container[entry] = *sch
|
||||
|
||||
case []spec.Schema:
|
||||
idx, err := strconv.Atoi(entry)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s not a number: %w", pth, err)
|
||||
}
|
||||
container[idx] = *sch
|
||||
|
||||
case *spec.SchemaOrArray:
|
||||
// NOTE: this is necessarily an array - otherwise, the parent would be *Schema
|
||||
idx, err := strconv.Atoi(entry)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s not a number: %w", pth, err)
|
||||
}
|
||||
container.Schemas[idx] = *sch
|
||||
|
||||
case spec.SchemaProperties:
|
||||
container[entry] = *sch
|
||||
|
||||
// NOTE: can't have case *spec.SchemaOrBool - the parent in this case is *Schema
|
||||
|
||||
default:
|
||||
return fmt.Errorf("unhandled type for parent of [%s]: %T", key, value)
|
||||
}
|
||||
case *spec.SchemaOrArray:
|
||||
*refable.Schema = *sch
|
||||
// NOTE: can't have case *spec.SchemaOrBool - the parent in this case is *Schema
|
||||
case *spec.SchemaOrBool:
|
||||
*refable.Schema = *sch
|
||||
default:
|
||||
return fmt.Errorf("no schema with ref found at %s for %T", key, value)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeepestRefResult holds the results from DeepestRef analysis
|
||||
type DeepestRefResult struct {
|
||||
Ref spec.Ref
|
||||
Schema *spec.Schema
|
||||
Warnings []string
|
||||
}
|
||||
|
||||
// DeepestRef finds the first definition ref, from a cascade of nested refs which are not definitions.
|
||||
// - if no definition is found, returns the deepest ref.
|
||||
// - pointers to external files are expanded
|
||||
//
|
||||
// NOTE: all external $ref's are assumed to be already expanded at this stage.
|
||||
func DeepestRef(sp *spec.Swagger, opts *spec.ExpandOptions, ref spec.Ref) (*DeepestRefResult, error) {
|
||||
if !ref.HasFragmentOnly {
|
||||
// we found an external $ref, which is odd at this stage:
|
||||
// do nothing on external $refs
|
||||
return &DeepestRefResult{Ref: ref}, nil
|
||||
}
|
||||
|
||||
currentRef := ref
|
||||
visited := make(map[string]bool, 64)
|
||||
warnings := make([]string, 0, 2)
|
||||
|
||||
DOWNREF:
|
||||
for currentRef.String() != "" {
|
||||
if path.Dir(currentRef.String()) == definitionsPath {
|
||||
// this is a top-level definition: stop here and return this ref
|
||||
return &DeepestRefResult{Ref: currentRef}, nil
|
||||
}
|
||||
|
||||
if _, beenThere := visited[currentRef.String()]; beenThere {
|
||||
return nil,
|
||||
fmt.Errorf("cannot resolve cyclic chain of pointers under %s", currentRef.String())
|
||||
}
|
||||
|
||||
visited[currentRef.String()] = true
|
||||
value, _, err := currentRef.GetPointer().Get(sp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch refable := value.(type) {
|
||||
case *spec.Schema:
|
||||
if refable.Ref.String() == "" {
|
||||
break DOWNREF
|
||||
}
|
||||
currentRef = refable.Ref
|
||||
|
||||
case spec.Schema:
|
||||
if refable.Ref.String() == "" {
|
||||
break DOWNREF
|
||||
}
|
||||
currentRef = refable.Ref
|
||||
|
||||
case *spec.SchemaOrArray:
|
||||
if refable.Schema == nil || refable.Schema != nil && refable.Schema.Ref.String() == "" {
|
||||
break DOWNREF
|
||||
}
|
||||
currentRef = refable.Schema.Ref
|
||||
|
||||
case *spec.SchemaOrBool:
|
||||
if refable.Schema == nil || refable.Schema != nil && refable.Schema.Ref.String() == "" {
|
||||
break DOWNREF
|
||||
}
|
||||
currentRef = refable.Schema.Ref
|
||||
|
||||
case spec.Response:
|
||||
// a pointer points to a schema initially marshalled in responses section...
|
||||
// Attempt to convert this to a schema. If this fails, the spec is invalid
|
||||
asJSON, _ := refable.MarshalJSON()
|
||||
var asSchema spec.Schema
|
||||
|
||||
err := asSchema.UnmarshalJSON(asJSON)
|
||||
if err != nil {
|
||||
return nil,
|
||||
fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T",
|
||||
currentRef.String(), value)
|
||||
}
|
||||
warnings = append(warnings, fmt.Sprintf("found $ref %q (response) interpreted as schema", currentRef.String()))
|
||||
|
||||
if asSchema.Ref.String() == "" {
|
||||
break DOWNREF
|
||||
}
|
||||
currentRef = asSchema.Ref
|
||||
|
||||
case spec.Parameter:
|
||||
// a pointer points to a schema initially marshalled in parameters section...
|
||||
// Attempt to convert this to a schema. If this fails, the spec is invalid
|
||||
asJSON, _ := refable.MarshalJSON()
|
||||
var asSchema spec.Schema
|
||||
if err := asSchema.UnmarshalJSON(asJSON); err != nil {
|
||||
return nil,
|
||||
fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T",
|
||||
currentRef.String(), value)
|
||||
}
|
||||
|
||||
warnings = append(warnings, fmt.Sprintf("found $ref %q (parameter) interpreted as schema", currentRef.String()))
|
||||
|
||||
if asSchema.Ref.String() == "" {
|
||||
break DOWNREF
|
||||
}
|
||||
currentRef = asSchema.Ref
|
||||
|
||||
default:
|
||||
return nil,
|
||||
fmt.Errorf("unhandled type to resolve JSON pointer %s. Expected a Schema, got: %T",
|
||||
currentRef.String(), value)
|
||||
}
|
||||
}
|
||||
|
||||
// assess what schema we're ending with
|
||||
sch, erv := spec.ResolveRefWithBase(sp, ¤tRef, opts)
|
||||
if erv != nil {
|
||||
return nil, erv
|
||||
}
|
||||
|
||||
if sch == nil {
|
||||
return nil, fmt.Errorf("no schema found at %s", currentRef.String())
|
||||
}
|
||||
|
||||
return &DeepestRefResult{Ref: currentRef, Schema: sch, Warnings: warnings}, nil
|
||||
}
|
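
For a chain such as an operation response schema pointing at #/responses/recordResponse, which in turn points at #/definitions/record, the walk above stops at the #/definitions entry. A sketch mirroring the call sites in flatten_name.go (deepestFor is a hypothetical helper inside package analysis):

package analysis

import (
	"fmt"

	"github.com/go-openapi/analysis/internal/flatten/replace"
	"github.com/go-openapi/spec"
)

// deepestFor resolves a pointer down to its deepest (ideally #/definitions) target.
func deepestFor(sp *spec.Swagger, opts *FlattenOpts, ref spec.Ref) error {
	res, err := replace.DeepestRef(sp, opts.ExpandOpts(false), ref)
	if err != nil {
		return err
	}
	fmt.Printf("resolved %s to %s (%d warnings)\n", ref.String(), res.Ref.String(), len(res.Warnings))
	return nil
}
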
29 vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go generated vendored Normal file
@ -0,0 +1,29 @@
|
||||
// Package schutils provides tools to save or clone a schema
|
||||
// when flattening a spec.
|
||||
package schutils
|
||||
|
||||
import (
|
||||
"github.com/go-openapi/spec"
|
||||
"github.com/go-openapi/swag"
|
||||
)
|
||||
|
||||
// Save registers a schema as an entry in spec #/definitions
|
||||
func Save(sp *spec.Swagger, name string, schema *spec.Schema) {
|
||||
if schema == nil {
|
||||
return
|
||||
}
|
||||
|
||||
if sp.Definitions == nil {
|
||||
sp.Definitions = make(map[string]spec.Schema, 150)
|
||||
}
|
||||
|
||||
sp.Definitions[name] = *schema
|
||||
}
|
||||
|
||||
// Clone deep-clones a schema
|
||||
func Clone(schema *spec.Schema) *spec.Schema {
|
||||
var sch spec.Schema
|
||||
_ = swag.FromDynamicJSON(schema, &sch)
|
||||
|
||||
return &sch
|
||||
}
|
201 vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go generated vendored Normal file
@ -0,0 +1,201 @@
|
||||
package sortref
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/jsonpointer"
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
const (
|
||||
paths = "paths"
|
||||
responses = "responses"
|
||||
parameters = "parameters"
|
||||
definitions = "definitions"
|
||||
)
|
||||
|
||||
var (
|
||||
ignoredKeys map[string]struct{}
|
||||
validMethods map[string]struct{}
|
||||
)
|
||||
|
||||
func init() {
|
||||
ignoredKeys = map[string]struct{}{
|
||||
"schema": {},
|
||||
"properties": {},
|
||||
"not": {},
|
||||
"anyOf": {},
|
||||
"oneOf": {},
|
||||
}
|
||||
|
||||
validMethods = map[string]struct{}{
|
||||
"GET": {},
|
||||
"HEAD": {},
|
||||
"OPTIONS": {},
|
||||
"PATCH": {},
|
||||
"POST": {},
|
||||
"PUT": {},
|
||||
"DELETE": {},
|
||||
}
|
||||
}
|
||||
|
||||
// Key represents a key item constructed from /-separated segments
|
||||
type Key struct {
|
||||
Segments int
|
||||
Key string
|
||||
}
|
||||
|
||||
// Keys is a sortable collection of Keys
|
||||
type Keys []Key
|
||||
|
||||
func (k Keys) Len() int { return len(k) }
|
||||
func (k Keys) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
|
||||
func (k Keys) Less(i, j int) bool {
|
||||
return k[i].Segments > k[j].Segments || (k[i].Segments == k[j].Segments && k[i].Key < k[j].Key)
|
||||
}
|
||||
|
||||
// KeyParts constructs a SplitKey with all its /-separated segments decomposed. It is sortable.
|
||||
func KeyParts(key string) SplitKey {
|
||||
var res []string
|
||||
for _, part := range strings.Split(key[1:], "/") {
|
||||
if part != "" {
|
||||
res = append(res, jsonpointer.Unescape(part))
|
||||
}
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
// SplitKey holds the parts of a /-separated key, so that their location may be determined.
|
||||
type SplitKey []string
|
||||
|
||||
// IsDefinition is true when the split key is in the #/definitions section of a spec
|
||||
func (s SplitKey) IsDefinition() bool {
|
||||
return len(s) > 1 && s[0] == definitions
|
||||
}
|
||||
|
||||
// DefinitionName yields the name of the definition
|
||||
func (s SplitKey) DefinitionName() string {
|
||||
if !s.IsDefinition() {
|
||||
return ""
|
||||
}
|
||||
|
||||
return s[1]
|
||||
}
|
||||
|
||||
func (s SplitKey) isKeyName(i int) bool {
|
||||
if i <= 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
count := 0
|
||||
for idx := i - 1; idx > 0; idx-- {
|
||||
if s[idx] != "properties" {
|
||||
break
|
||||
}
|
||||
count++
|
||||
}
|
||||
|
||||
return count%2 != 0
|
||||
}
|
||||
|
||||
// PartAdder knows how to construct the components of a new name
|
||||
type PartAdder func(string) []string
|
||||
|
||||
// BuildName builds a name from segments
|
||||
func (s SplitKey) BuildName(segments []string, startIndex int, adder PartAdder) string {
|
||||
for i, part := range s[startIndex:] {
|
||||
if _, ignored := ignoredKeys[part]; !ignored || s.isKeyName(startIndex+i) {
|
||||
segments = append(segments, adder(part)...)
|
||||
}
|
||||
}
|
||||
|
||||
return strings.Join(segments, " ")
|
||||
}
|
||||
|
||||
// IsOperation is true when the split key is in the operations section
|
||||
func (s SplitKey) IsOperation() bool {
|
||||
return len(s) > 1 && s[0] == paths
|
||||
}
|
||||
|
||||
// IsSharedOperationParam is true when the split key is in the parameters section of a path
|
||||
func (s SplitKey) IsSharedOperationParam() bool {
|
||||
return len(s) > 2 && s[0] == paths && s[2] == parameters
|
||||
}
|
||||
|
||||
// IsSharedParam is true when the split key is in the #/parameters section of a spec
|
||||
func (s SplitKey) IsSharedParam() bool {
|
||||
return len(s) > 1 && s[0] == parameters
|
||||
}
|
||||
|
||||
// IsOperationParam is true when the split key is in the parameters section of an operation
|
||||
func (s SplitKey) IsOperationParam() bool {
|
||||
return len(s) > 3 && s[0] == paths && s[3] == parameters
|
||||
}
|
||||
|
||||
// IsOperationResponse is true when the split key is in the responses section of an operation
|
||||
func (s SplitKey) IsOperationResponse() bool {
|
||||
return len(s) > 3 && s[0] == paths && s[3] == responses
|
||||
}
|
||||
|
||||
// IsSharedResponse is true when the split key is in the #/responses section of a spec
|
||||
func (s SplitKey) IsSharedResponse() bool {
|
||||
return len(s) > 1 && s[0] == responses
|
||||
}
|
||||
|
||||
// IsDefaultResponse is true when the split key is the default response for an operation
|
||||
func (s SplitKey) IsDefaultResponse() bool {
|
||||
return len(s) > 4 && s[0] == paths && s[3] == responses && s[4] == "default"
|
||||
}
|
||||
|
||||
// IsStatusCodeResponse is true when the split key is an operation response with a status code
|
||||
func (s SplitKey) IsStatusCodeResponse() bool {
|
||||
isInt := func() bool {
|
||||
_, err := strconv.Atoi(s[4])
|
||||
|
||||
return err == nil
|
||||
}
|
||||
|
||||
return len(s) > 4 && s[0] == paths && s[3] == responses && isInt()
|
||||
}
|
||||
|
||||
// ResponseName yields either the status code or "Default" for a response
|
||||
func (s SplitKey) ResponseName() string {
|
||||
if s.IsStatusCodeResponse() {
|
||||
code, _ := strconv.Atoi(s[4])
|
||||
|
||||
return http.StatusText(code)
|
||||
}
|
||||
|
||||
if s.IsDefaultResponse() {
|
||||
return "Default"
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
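
To make the split-key helpers above concrete, a sketch of what a typical operation-response pointer decomposes into (hypothetical in-package usage; expected values are traced from the code, not executed):

package sortref

import "fmt"

func exampleSplitKey() {
	k := KeyParts("#/paths/~1records/get/responses/200/schema")
	// k is SplitKey{"paths", "/records", "get", "responses", "200", "schema"}
	fmt.Println(k.IsOperation())          // true
	fmt.Println(k.IsOperationResponse())  // true
	fmt.Println(k.IsStatusCodeResponse()) // true
	fmt.Println(k.ResponseName())         // "OK" (http.StatusText(200))
	fmt.Println(k.PathItemRef().String()) // "#/paths/~1records/GET"
}
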
|
||||
// PathItemRef constructs a $ref object from a split key of the form /{path}/{method}
|
||||
func (s SplitKey) PathItemRef() spec.Ref {
|
||||
if len(s) < 3 {
|
||||
return spec.Ref{}
|
||||
}
|
||||
|
||||
pth, method := s[1], s[2]
|
||||
if _, isValidMethod := validMethods[strings.ToUpper(method)]; !isValidMethod && !strings.HasPrefix(method, "x-") {
|
||||
return spec.Ref{}
|
||||
}
|
||||
|
||||
return spec.MustCreateRef("#" + path.Join("/", paths, jsonpointer.Escape(pth), strings.ToUpper(method)))
|
||||
}
|
||||
|
||||
// PathRef constructs a $ref object from a split key of the form /paths/{reference}
|
||||
func (s SplitKey) PathRef() spec.Ref {
|
||||
if !s.IsOperation() {
|
||||
return spec.Ref{}
|
||||
}
|
||||
|
||||
return spec.MustCreateRef("#" + path.Join("/", paths, jsonpointer.Escape(s[1])))
|
||||
}
|
141 vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go generated vendored Normal file
@ -0,0 +1,141 @@
|
||||
package sortref
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/analysis/internal/flatten/normalize"
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
var depthGroupOrder = []string{
|
||||
"sharedParam", "sharedResponse", "sharedOpParam", "opParam", "codeResponse", "defaultResponse", "definition",
|
||||
}
|
||||
|
||||
type mapIterator struct {
|
||||
len int
|
||||
mapIter *reflect.MapIter
|
||||
}
|
||||
|
||||
func (i *mapIterator) Next() bool {
|
||||
return i.mapIter.Next()
|
||||
}
|
||||
|
||||
func (i *mapIterator) Len() int {
|
||||
return i.len
|
||||
}
|
||||
|
||||
func (i *mapIterator) Key() string {
|
||||
return i.mapIter.Key().String()
|
||||
}
|
||||
|
||||
func mustMapIterator(anyMap interface{}) *mapIterator {
|
||||
val := reflect.ValueOf(anyMap)
|
||||
|
||||
return &mapIterator{mapIter: val.MapRange(), len: val.Len()}
|
||||
}
|
||||
|
||||
// DepthFirst sorts a map of anything. It groups keys by category
|
||||
// (shared params, op param, status code response, default response, definitions),
|
||||
// sorts each group internally by number of parts in the key and lexical names,
|
||||
// and flattens the groups into a single list of keys.
|
||||
func DepthFirst(in interface{}) []string {
|
||||
iterator := mustMapIterator(in)
|
||||
sorted := make([]string, 0, iterator.Len())
|
||||
grouped := make(map[string]Keys, iterator.Len())
|
||||
|
||||
for iterator.Next() {
|
||||
k := iterator.Key()
|
||||
split := KeyParts(k)
|
||||
var pk string
|
||||
|
||||
if split.IsSharedOperationParam() {
|
||||
pk = "sharedOpParam"
|
||||
}
|
||||
if split.IsOperationParam() {
|
||||
pk = "opParam"
|
||||
}
|
||||
if split.IsStatusCodeResponse() {
|
||||
pk = "codeResponse"
|
||||
}
|
||||
if split.IsDefaultResponse() {
|
||||
pk = "defaultResponse"
|
||||
}
|
||||
if split.IsDefinition() {
|
||||
pk = "definition"
|
||||
}
|
||||
if split.IsSharedParam() {
|
||||
pk = "sharedParam"
|
||||
}
|
||||
if split.IsSharedResponse() {
|
||||
pk = "sharedResponse"
|
||||
}
|
||||
grouped[pk] = append(grouped[pk], Key{Segments: len(split), Key: k})
|
||||
}
|
||||
|
||||
for _, pk := range depthGroupOrder {
|
||||
res := grouped[pk]
|
||||
sort.Sort(res)
|
||||
|
||||
for _, v := range res {
|
||||
sorted = append(sorted, v.Key)
|
||||
}
|
||||
}
|
||||
|
||||
return sorted
|
||||
}
|
||||
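
A sketch of the resulting ordering for a few illustrative keys: more deeply nested, more specific keys come out before the definitions they may end up pointing to (hypothetical in-package usage; expected order follows from depthGroupOrder above):

package sortref

import "fmt"

func exampleDepthFirst() {
	keys := map[string]struct{}{
		"#/definitions/record":                       {},
		"#/paths/~1records/get/responses/200/schema": {},
		"#/paths/~1records/get/parameters/0/schema":  {},
	}
	for _, k := range DepthFirst(keys) {
		fmt.Println(k)
	}
	// expected order: the operation parameter, then the status-code response,
	// then the definition (per depthGroupOrder above)
}
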
|
||||
// topmostRefs sorts refs by hierarchical then lexicographic order,
|
||||
// yielding refs ordered breadth-first.
|
||||
type topmostRefs []string
|
||||
|
||||
func (k topmostRefs) Len() int { return len(k) }
|
||||
func (k topmostRefs) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
|
||||
func (k topmostRefs) Less(i, j int) bool {
|
||||
li, lj := len(strings.Split(k[i], "/")), len(strings.Split(k[j], "/"))
|
||||
if li == lj {
|
||||
return k[i] < k[j]
|
||||
}
|
||||
|
||||
return li < lj
|
||||
}
|
||||
|
||||
// TopmostFirst sorts references by depth
|
||||
func TopmostFirst(refs []string) []string {
|
||||
res := topmostRefs(refs)
|
||||
sort.Sort(res)
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
// RefRevIdx is a reverse index for references
|
||||
type RefRevIdx struct {
|
||||
Ref spec.Ref
|
||||
Keys []string
|
||||
}
|
||||
|
||||
// ReverseIndex builds a reverse index for references in schemas
|
||||
func ReverseIndex(schemas map[string]spec.Ref, basePath string) map[string]RefRevIdx {
|
||||
collected := make(map[string]RefRevIdx)
|
||||
for key, schRef := range schemas {
|
||||
// normalize paths before sorting,
|
||||
// so that keys coming from the same external file are grouped together
|
||||
normalizedPath := normalize.Path(schRef, basePath)
|
||||
|
||||
entry, ok := collected[normalizedPath]
|
||||
if ok {
|
||||
entry.Keys = append(entry.Keys, key)
|
||||
collected[normalizedPath] = entry
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
collected[normalizedPath] = RefRevIdx{
|
||||
Ref: schRef,
|
||||
Keys: []string{key},
|
||||
}
|
||||
}
|
||||
|
||||
return collected
|
||||
}
|
515 vendor/github.com/go-openapi/analysis/mixin.go generated vendored Normal file
@ -0,0 +1,515 @@
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package analysis
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
// Mixin modifies the primary swagger spec by adding the paths and
|
||||
// definitions from the mixin specs. Top level parameters and
|
||||
// responses from the mixins are also carried over. Operation id
|
||||
// collisions are avoided by appending "Mixin<N>" but only if
|
||||
// needed.
|
||||
//
|
||||
// The following parts of primary are subject to merge, filling empty details
|
||||
// - Info
|
||||
// - BasePath
|
||||
// - Host
|
||||
// - ExternalDocs
|
||||
//
|
||||
// Consider calling FixEmptyResponseDescriptions() on the modified primary
|
||||
// if you read them from storage and they are valid to start with.
|
||||
//
|
||||
// Entries in "paths", "definitions", "parameters" and "responses" are
|
||||
// added to the primary in the order of the given mixins. If the entry
|
||||
// already exists in primary it is skipped with a warning message.
|
||||
//
|
||||
// The count of skipped entries (from collisions) is returned so any
|
||||
// deviation from the number expected can flag a warning in your build
|
||||
// scripts. Carefully review the collisions before accepting them;
|
||||
// consider renaming things if possible.
|
||||
//
|
||||
// No key normalization takes place (paths, type defs,
|
||||
// etc). Ensure they are canonical if your downstream tools do
|
||||
// key normalization of any form.
|
||||
//
|
||||
// Merging schemes (http, https) and consumers/producers does not account for
|
||||
// collisions.
|
||||
func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string {
|
||||
skipped := make([]string, 0, len(mixins))
|
||||
opIds := getOpIds(primary)
|
||||
initPrimary(primary)
|
||||
|
||||
for i, m := range mixins {
|
||||
skipped = append(skipped, mergeSwaggerProps(primary, m)...)
|
||||
|
||||
skipped = append(skipped, mergeConsumes(primary, m)...)
|
||||
|
||||
skipped = append(skipped, mergeProduces(primary, m)...)
|
||||
|
||||
skipped = append(skipped, mergeTags(primary, m)...)
|
||||
|
||||
skipped = append(skipped, mergeSchemes(primary, m)...)
|
||||
|
||||
skipped = append(skipped, mergeSecurityDefinitions(primary, m)...)
|
||||
|
||||
skipped = append(skipped, mergeSecurityRequirements(primary, m)...)
|
||||
|
||||
skipped = append(skipped, mergeDefinitions(primary, m)...)
|
||||
|
||||
// merging paths requires a map of operationIDs to work with
|
||||
skipped = append(skipped, mergePaths(primary, m, opIds, i)...)
|
||||
|
||||
skipped = append(skipped, mergeParameters(primary, m)...)
|
||||
|
||||
skipped = append(skipped, mergeResponses(primary, m)...)
|
||||
}
|
||||
|
||||
return skipped
|
||||
}
|
||||
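
A sketch of the usage pattern suggested by the doc comment above (mergeSpecs is a hypothetical wrapper; the *spec.Swagger values are assumed to be already loaded):

package specmerge

import (
	"log"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/spec"
)

// mergeSpecs folds the mixins into primary and reports every skipped collision.
func mergeSpecs(primary *spec.Swagger, mixins ...*spec.Swagger) {
	collisions := analysis.Mixin(primary, mixins...)
	// the doc comment above suggests treating an unexpected number of skipped
	// entries as a build warning
	if len(collisions) != 0 {
		log.Printf("mixin skipped %d colliding entries: %v", len(collisions), collisions)
	}
}
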
|
||||
// getOpIds extracts all the paths.<path>.operationIds from the given
|
||||
// spec and returns them as the keys in a map with 'true' values.
|
||||
func getOpIds(s *spec.Swagger) map[string]bool {
|
||||
rv := make(map[string]bool)
|
||||
if s.Paths == nil {
|
||||
return rv
|
||||
}
|
||||
|
||||
for _, v := range s.Paths.Paths {
|
||||
piops := pathItemOps(v)
|
||||
|
||||
for _, op := range piops {
|
||||
rv[op.ID] = true
|
||||
}
|
||||
}
|
||||
|
||||
return rv
|
||||
}
|
||||
|
||||
func pathItemOps(p spec.PathItem) []*spec.Operation {
|
||||
var rv []*spec.Operation
|
||||
rv = appendOp(rv, p.Get)
|
||||
rv = appendOp(rv, p.Put)
|
||||
rv = appendOp(rv, p.Post)
|
||||
rv = appendOp(rv, p.Delete)
|
||||
rv = appendOp(rv, p.Head)
|
||||
rv = appendOp(rv, p.Patch)
|
||||
|
||||
return rv
|
||||
}
|
||||
|
||||
func appendOp(ops []*spec.Operation, op *spec.Operation) []*spec.Operation {
|
||||
if op == nil {
|
||||
return ops
|
||||
}
|
||||
|
||||
return append(ops, op)
|
||||
}
|
||||
|
||||
func mergeSecurityDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
|
||||
for k, v := range m.SecurityDefinitions {
|
||||
if _, exists := primary.SecurityDefinitions[k]; exists {
|
||||
warn := fmt.Sprintf(
|
||||
"SecurityDefinitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
|
||||
skipped = append(skipped, warn)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
primary.SecurityDefinitions[k] = v
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func mergeSecurityRequirements(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
|
||||
for _, v := range m.Security {
|
||||
found := false
|
||||
for _, vv := range primary.Security {
|
||||
if reflect.DeepEqual(v, vv) {
|
||||
found = true
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if found {
|
||||
warn := fmt.Sprintf(
|
||||
"Security requirement: '%v' already exists in primary or higher priority mixin, skipping\n", v)
|
||||
skipped = append(skipped, warn)
|
||||
|
||||
continue
|
||||
}
|
||||
primary.Security = append(primary.Security, v)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func mergeDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
|
||||
for k, v := range m.Definitions {
|
||||
// assume name collisions represent IDENTICAL type. careful.
|
||||
if _, exists := primary.Definitions[k]; exists {
|
||||
warn := fmt.Sprintf(
|
||||
"definitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
|
||||
skipped = append(skipped, warn)
|
||||
|
||||
continue
|
||||
}
|
||||
primary.Definitions[k] = v
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func mergePaths(primary *spec.Swagger, m *spec.Swagger, opIds map[string]bool, mixIndex int) (skipped []string) {
|
||||
if m.Paths != nil {
|
||||
for k, v := range m.Paths.Paths {
|
||||
if _, exists := primary.Paths.Paths[k]; exists {
|
||||
warn := fmt.Sprintf(
|
||||
"paths entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
|
||||
skipped = append(skipped, warn)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
// Swagger requires that operationIds be
|
||||
// unique within a spec. If we find a
|
||||
// collision we append "Mixin0" to the
|
||||
// operationId we are adding, where 0 is mixin
|
||||
// index. We assume that operationIds within
|
||||
// all the provided specs are already unique.
|
||||
piops := pathItemOps(v)
|
||||
for _, piop := range piops {
|
||||
if opIds[piop.ID] {
|
||||
piop.ID = fmt.Sprintf("%v%v%v", piop.ID, "Mixin", mixIndex)
|
||||
}
|
||||
opIds[piop.ID] = true
|
||||
}
|
||||
primary.Paths.Paths[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func mergeParameters(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
|
||||
for k, v := range m.Parameters {
|
||||
// could try to rename on conflict but would
|
||||
// have to fix $refs in the mixin. Complain
|
||||
// for now
|
||||
if _, exists := primary.Parameters[k]; exists {
|
||||
warn := fmt.Sprintf(
|
||||
"top level parameters entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
|
||||
skipped = append(skipped, warn)
|
||||
|
||||
continue
|
||||
}
|
||||
primary.Parameters[k] = v
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func mergeResponses(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
|
||||
for k, v := range m.Responses {
|
||||
// could try to rename on conflict but would
|
||||
// have to fix $refs in the mixin. Complain
|
||||
// for now
|
||||
if _, exists := primary.Responses[k]; exists {
|
||||
warn := fmt.Sprintf(
|
||||
"top level responses entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
|
||||
skipped = append(skipped, warn)
|
||||
|
||||
continue
|
||||
}
|
||||
primary.Responses[k] = v
|
||||
}
|
||||
|
||||
return skipped
|
||||
}
|
||||
|
||||
func mergeConsumes(primary *spec.Swagger, m *spec.Swagger) []string {
|
||||
for _, v := range m.Consumes {
|
||||
found := false
|
||||
for _, vv := range primary.Consumes {
|
||||
if v == vv {
|
||||
found = true
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if found {
|
||||
// no warning here: we just skip it
|
||||
continue
|
||||
}
|
||||
primary.Consumes = append(primary.Consumes, v)
|
||||
}
|
||||
|
||||
return []string{}
|
||||
}
|
||||
|
||||
func mergeProduces(primary *spec.Swagger, m *spec.Swagger) []string {
|
||||
for _, v := range m.Produces {
|
||||
found := false
|
||||
for _, vv := range primary.Produces {
|
||||
if v == vv {
|
||||
found = true
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if found {
|
||||
// no warning here: we just skip it
|
||||
continue
|
||||
}
|
||||
primary.Produces = append(primary.Produces, v)
|
||||
}
|
||||
|
||||
return []string{}
|
||||
}
|
||||
|
||||
func mergeTags(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
|
||||
for _, v := range m.Tags {
|
||||
found := false
|
||||
for _, vv := range primary.Tags {
|
||||
if v.Name == vv.Name {
|
||||
found = true
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if found {
|
||||
warn := fmt.Sprintf(
|
||||
"top level tags entry with name '%v' already exists in primary or higher priority mixin, skipping\n",
|
||||
v.Name,
|
||||
)
|
||||
skipped = append(skipped, warn)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
primary.Tags = append(primary.Tags, v)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func mergeSchemes(primary *spec.Swagger, m *spec.Swagger) []string {
|
||||
for _, v := range m.Schemes {
|
||||
found := false
|
||||
for _, vv := range primary.Schemes {
|
||||
if v == vv {
|
||||
found = true
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if found {
|
||||
// no warning here: we just skip it
|
||||
continue
|
||||
}
|
||||
primary.Schemes = append(primary.Schemes, v)
|
||||
}
|
||||
|
||||
return []string{}
|
||||
}
|
||||
|
||||
func mergeSwaggerProps(primary *spec.Swagger, m *spec.Swagger) []string {
|
||||
var skipped, skippedInfo, skippedDocs []string
|
||||
|
||||
primary.Extensions, skipped = mergeExtensions(primary.Extensions, m.Extensions)
|
||||
|
||||
// merging details in swagger top properties
|
||||
if primary.Host == "" {
|
||||
primary.Host = m.Host
|
||||
}
|
||||
|
||||
if primary.BasePath == "" {
|
||||
primary.BasePath = m.BasePath
|
||||
}
|
||||
|
||||
if primary.Info == nil {
|
||||
primary.Info = m.Info
|
||||
} else if m.Info != nil {
|
||||
skippedInfo = mergeInfo(primary.Info, m.Info)
|
||||
skipped = append(skipped, skippedInfo...)
|
||||
}
|
||||
|
||||
if primary.ExternalDocs == nil {
|
||||
primary.ExternalDocs = m.ExternalDocs
|
||||
} else if m != nil {
|
||||
skippedDocs = mergeExternalDocs(primary.ExternalDocs, m.ExternalDocs)
|
||||
skipped = append(skipped, skippedDocs...)
|
||||
}
|
||||
|
||||
return skipped
|
||||
}
|
||||
|
||||
// nolint: unparam
|
||||
func mergeExternalDocs(primary *spec.ExternalDocumentation, m *spec.ExternalDocumentation) []string {
|
||||
if primary.Description == "" {
|
||||
primary.Description = m.Description
|
||||
}
|
||||
|
||||
if primary.URL == "" {
|
||||
primary.URL = m.URL
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func mergeInfo(primary *spec.Info, m *spec.Info) []string {
|
||||
var sk, skipped []string
|
||||
|
||||
primary.Extensions, sk = mergeExtensions(primary.Extensions, m.Extensions)
|
||||
skipped = append(skipped, sk...)
|
||||
|
||||
if primary.Description == "" {
|
||||
primary.Description = m.Description
|
||||
}
|
||||
|
||||
if primary.Title == "" {
|
||||
primary.Description = m.Description
|
||||
}
|
||||
|
||||
if primary.TermsOfService == "" {
|
||||
primary.TermsOfService = m.TermsOfService
|
||||
}
|
||||
|
||||
if primary.Version == "" {
|
||||
primary.Version = m.Version
|
||||
}
|
||||
|
||||
if primary.Contact == nil {
|
||||
primary.Contact = m.Contact
|
||||
} else if m.Contact != nil {
|
||||
var csk []string
|
||||
primary.Contact.Extensions, csk = mergeExtensions(primary.Contact.Extensions, m.Contact.Extensions)
|
||||
skipped = append(skipped, csk...)
|
||||
|
||||
if primary.Contact.Name == "" {
|
||||
primary.Contact.Name = m.Contact.Name
|
||||
}
|
||||
|
||||
if primary.Contact.URL == "" {
|
||||
primary.Contact.URL = m.Contact.URL
|
||||
}
|
||||
|
||||
if primary.Contact.Email == "" {
|
||||
primary.Contact.Email = m.Contact.Email
|
||||
}
|
||||
}
|
||||
|
||||
if primary.License == nil {
|
||||
primary.License = m.License
|
||||
} else if m.License != nil {
|
||||
var lsk []string
|
||||
primary.License.Extensions, lsk = mergeExtensions(primary.License.Extensions, m.License.Extensions)
|
||||
skipped = append(skipped, lsk...)
|
||||
|
||||
if primary.License.Name == "" {
|
||||
primary.License.Name = m.License.Name
|
||||
}
|
||||
|
||||
if primary.License.URL == "" {
|
||||
primary.License.URL = m.License.URL
|
||||
}
|
||||
}
|
||||
|
||||
return skipped
|
||||
}
|
||||
|
||||
func mergeExtensions(primary spec.Extensions, m spec.Extensions) (result spec.Extensions, skipped []string) {
|
||||
if primary == nil {
|
||||
result = m
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
if m == nil {
|
||||
result = primary
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
result = primary
|
||||
for k, v := range m {
|
||||
if _, found := primary[k]; found {
|
||||
skipped = append(skipped, k)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
primary[k] = v
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func initPrimary(primary *spec.Swagger) {
|
||||
if primary.SecurityDefinitions == nil {
|
||||
primary.SecurityDefinitions = make(map[string]*spec.SecurityScheme)
|
||||
}
|
||||
|
||||
if primary.Security == nil {
|
||||
primary.Security = make([]map[string][]string, 0, 10)
|
||||
}
|
||||
|
||||
if primary.Produces == nil {
|
||||
primary.Produces = make([]string, 0, 10)
|
||||
}
|
||||
|
||||
if primary.Consumes == nil {
|
||||
primary.Consumes = make([]string, 0, 10)
|
||||
}
|
||||
|
||||
if primary.Tags == nil {
|
||||
primary.Tags = make([]spec.Tag, 0, 10)
|
||||
}
|
||||
|
||||
if primary.Schemes == nil {
|
||||
primary.Schemes = make([]string, 0, 10)
|
||||
}
|
||||
|
||||
if primary.Paths == nil {
|
||||
primary.Paths = &spec.Paths{Paths: make(map[string]spec.PathItem)}
|
||||
}
|
||||
|
||||
if primary.Paths.Paths == nil {
|
||||
primary.Paths.Paths = make(map[string]spec.PathItem)
|
||||
}
|
||||
|
||||
if primary.Definitions == nil {
|
||||
primary.Definitions = make(spec.Definitions)
|
||||
}
|
||||
|
||||
if primary.Parameters == nil {
|
||||
primary.Parameters = make(map[string]spec.Parameter)
|
||||
}
|
||||
|
||||
if primary.Responses == nil {
|
||||
primary.Responses = make(map[string]spec.Response)
|
||||
}
|
||||
}
|
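The merge helpers above are internal plumbing; in go-openapi/analysis they are driven by the package's exported Mixin entry point, which is not part of this hunk. A minimal, hedged sketch of how a consumer might exercise that behaviour (assuming the exported analysis.Mixin and the spec.Swagger field layout; the example values are illustrative only):

package main

import (
	"fmt"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/spec"
)

func main() {
	// primary is enriched in place; mixin is the lower-priority spec folded into it.
	primary := &spec.Swagger{SwaggerProps: spec.SwaggerProps{Host: "example.com"}}
	mixin := &spec.Swagger{SwaggerProps: spec.SwaggerProps{BasePath: "/v1"}}

	// Mixin merges consumes/produces, schemes, security, paths, definitions,
	// parameters, responses and tags, and reports what was skipped on collision.
	skipped := analysis.Mixin(primary, mixin)
	fmt.Println(primary.BasePath, len(skipped))
}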
256 vendor/github.com/go-openapi/analysis/schema.go (generated, vendored, new file) @ -0,0 +1,256 @@
package analysis

import (
	"fmt"

	"github.com/go-openapi/spec"
	"github.com/go-openapi/strfmt"
)

// SchemaOpts configures the schema analyzer
type SchemaOpts struct {
	Schema   *spec.Schema
	Root     interface{}
	BasePath string
	_        struct{}
}

// Schema analysis, will classify the schema according to known
// patterns.
func Schema(opts SchemaOpts) (*AnalyzedSchema, error) {
	if opts.Schema == nil {
		return nil, fmt.Errorf("no schema to analyze")
	}

	a := &AnalyzedSchema{
		schema:   opts.Schema,
		root:     opts.Root,
		basePath: opts.BasePath,
	}

	a.initializeFlags()
	a.inferKnownType()
	a.inferEnum()
	a.inferBaseType()

	if err := a.inferMap(); err != nil {
		return nil, err
	}
	if err := a.inferArray(); err != nil {
		return nil, err
	}

	a.inferTuple()

	if err := a.inferFromRef(); err != nil {
		return nil, err
	}

	a.inferSimpleSchema()

	return a, nil
}

// AnalyzedSchema indicates what the schema represents
type AnalyzedSchema struct {
	schema   *spec.Schema
	root     interface{}
	basePath string

	hasProps           bool
	hasAllOf           bool
	hasItems           bool
	hasAdditionalProps bool
	hasAdditionalItems bool
	hasRef             bool

	IsKnownType      bool
	IsSimpleSchema   bool
	IsArray          bool
	IsSimpleArray    bool
	IsMap            bool
	IsSimpleMap      bool
	IsExtendedObject bool
	IsTuple          bool
	IsTupleWithExtra bool
	IsBaseType       bool
	IsEnum           bool
}

// Inherits copies value fields from other onto this schema
func (a *AnalyzedSchema) inherits(other *AnalyzedSchema) {
	if other == nil {
		return
	}
	a.hasProps = other.hasProps
	a.hasAllOf = other.hasAllOf
	a.hasItems = other.hasItems
	a.hasAdditionalItems = other.hasAdditionalItems
	a.hasAdditionalProps = other.hasAdditionalProps
	a.hasRef = other.hasRef

	a.IsKnownType = other.IsKnownType
	a.IsSimpleSchema = other.IsSimpleSchema
	a.IsArray = other.IsArray
	a.IsSimpleArray = other.IsSimpleArray
	a.IsMap = other.IsMap
	a.IsSimpleMap = other.IsSimpleMap
	a.IsExtendedObject = other.IsExtendedObject
	a.IsTuple = other.IsTuple
	a.IsTupleWithExtra = other.IsTupleWithExtra
	a.IsBaseType = other.IsBaseType
	a.IsEnum = other.IsEnum
}

func (a *AnalyzedSchema) inferFromRef() error {
	if a.hasRef {
		sch := new(spec.Schema)
		sch.Ref = a.schema.Ref
		err := spec.ExpandSchema(sch, a.root, nil)
		if err != nil {
			return err
		}
		rsch, err := Schema(SchemaOpts{
			Schema:   sch,
			Root:     a.root,
			BasePath: a.basePath,
		})
		if err != nil {
			// NOTE(fredbi): currently the only cause for errors is
			// unresolved ref. Since spec.ExpandSchema() expands the
			// schema recursively, there is no chance to get there,
			// until we add more causes for error in this schema analysis.
			return err
		}
		a.inherits(rsch)
	}

	return nil
}

func (a *AnalyzedSchema) inferSimpleSchema() {
	a.IsSimpleSchema = a.IsKnownType || a.IsSimpleArray || a.IsSimpleMap
}

func (a *AnalyzedSchema) inferKnownType() {
	tpe := a.schema.Type
	format := a.schema.Format
	a.IsKnownType = tpe.Contains("boolean") ||
		tpe.Contains("integer") ||
		tpe.Contains("number") ||
		tpe.Contains("string") ||
		(format != "" && strfmt.Default.ContainsName(format)) ||
		(a.isObjectType() && !a.hasProps && !a.hasAllOf && !a.hasAdditionalProps && !a.hasAdditionalItems)
}

func (a *AnalyzedSchema) inferMap() error {
	if !a.isObjectType() {
		return nil
	}

	hasExtra := a.hasProps || a.hasAllOf
	a.IsMap = a.hasAdditionalProps && !hasExtra
	a.IsExtendedObject = a.hasAdditionalProps && hasExtra

	if !a.IsMap {
		return nil
	}

	// maps
	if a.schema.AdditionalProperties.Schema != nil {
		msch, err := Schema(SchemaOpts{
			Schema:   a.schema.AdditionalProperties.Schema,
			Root:     a.root,
			BasePath: a.basePath,
		})
		if err != nil {
			return err
		}
		a.IsSimpleMap = msch.IsSimpleSchema
	} else if a.schema.AdditionalProperties.Allows {
		a.IsSimpleMap = true
	}

	return nil
}

func (a *AnalyzedSchema) inferArray() error {
	// an array has Items defined as an object schema, otherwise we qualify this JSON array as a tuple
	// (yes, even if the Items array contains only one element).
	// arrays in JSON schema may be unrestricted (i.e no Items specified).
	// Note that arrays in Swagger MUST have Items. Nonetheless, we analyze unrestricted arrays.
	//
	// NOTE: the spec package misses the distinction between:
	// items: [] and items: {}, so we consider both arrays here.
	a.IsArray = a.isArrayType() && (a.schema.Items == nil || a.schema.Items.Schemas == nil)
	if a.IsArray && a.hasItems {
		if a.schema.Items.Schema != nil {
			itsch, err := Schema(SchemaOpts{
				Schema:   a.schema.Items.Schema,
				Root:     a.root,
				BasePath: a.basePath,
			})
			if err != nil {
				return err
			}

			a.IsSimpleArray = itsch.IsSimpleSchema
		}
	}

	if a.IsArray && !a.hasItems {
		a.IsSimpleArray = true
	}

	return nil
}

func (a *AnalyzedSchema) inferTuple() {
	tuple := a.hasItems && a.schema.Items.Schemas != nil
	a.IsTuple = tuple && !a.hasAdditionalItems
	a.IsTupleWithExtra = tuple && a.hasAdditionalItems
}

func (a *AnalyzedSchema) inferBaseType() {
	if a.isObjectType() {
		a.IsBaseType = a.schema.Discriminator != ""
	}
}

func (a *AnalyzedSchema) inferEnum() {
	a.IsEnum = len(a.schema.Enum) > 0
}

func (a *AnalyzedSchema) initializeFlags() {
	a.hasProps = len(a.schema.Properties) > 0
	a.hasAllOf = len(a.schema.AllOf) > 0
	a.hasRef = a.schema.Ref.String() != ""

	a.hasItems = a.schema.Items != nil &&
		(a.schema.Items.Schema != nil || len(a.schema.Items.Schemas) > 0)

	a.hasAdditionalProps = a.schema.AdditionalProperties != nil &&
		(a.schema.AdditionalProperties.Schema != nil || a.schema.AdditionalProperties.Allows)

	a.hasAdditionalItems = a.schema.AdditionalItems != nil &&
		(a.schema.AdditionalItems.Schema != nil || a.schema.AdditionalItems.Allows)
}

func (a *AnalyzedSchema) isObjectType() bool {
	return !a.hasRef && (a.schema.Type == nil || a.schema.Type.Contains("") || a.schema.Type.Contains("object"))
}

func (a *AnalyzedSchema) isArrayType() bool {
	return !a.hasRef && (a.schema.Type != nil && a.schema.Type.Contains("array"))
}

// isAnalyzedAsComplex determines if an analyzed schema is eligible to flattening (i.e. it is "complex").
//
// Complex means the schema is any of:
// - a simple type (primitive)
// - an array of something (items are possibly complex ; if this is the case, items will generate a definition)
// - a map of something (additionalProperties are possibly complex ; if this is the case, additionalProperties will
// generate a definition)
func (a *AnalyzedSchema) isAnalyzedAsComplex() bool {
	return !a.IsSimpleSchema && !a.IsArray && !a.IsMap
}
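A minimal, hedged sketch of calling the Schema analyzer above (it assumes the spec.MapProperty and spec.StringProperty helpers from go-openapi/spec, which are not part of this diff; the variable names are illustrative):

package main

import (
	"fmt"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/spec"
)

func main() {
	// An "object" schema with only additionalProperties is classified as a map.
	sch := spec.MapProperty(spec.StringProperty())

	a, err := analysis.Schema(analysis.SchemaOpts{Schema: sch})
	if err != nil {
		panic(err)
	}
	// Expected under these assumptions: IsMap and IsSimpleMap are true.
	fmt.Println(a.IsMap, a.IsSimpleMap, a.IsKnownType)
}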
1 vendor/github.com/go-openapi/errors/.gitattributes (generated, vendored, new file) @ -0,0 +1 @@
*.go text eol=lf
2 vendor/github.com/go-openapi/errors/.gitignore (generated, vendored, new file) @ -0,0 +1,2 @@
secrets.yml
coverage.out
48 vendor/github.com/go-openapi/errors/.golangci.yml (generated, vendored, new file) @ -0,0 +1,48 @@
linters-settings:
  govet:
    check-shadowing: true
  golint:
    min-confidence: 0
  gocyclo:
    min-complexity: 30
  maligned:
    suggest-new: true
  dupl:
    threshold: 100
  goconst:
    min-len: 2
    min-occurrences: 4
linters:
  enable-all: true
  disable:
    - maligned
    - lll
    - gochecknoglobals
    - godox
    - gocognit
    - whitespace
    - wsl
    - funlen
    - gochecknoglobals
    - gochecknoinits
    - scopelint
    - wrapcheck
    - exhaustivestruct
    - exhaustive
    - nlreturn
    - testpackage
    - gci
    - gofumpt
    - goerr113
    - gomnd
    - tparallel
    - nestif
    - godot
    - errorlint
    - paralleltest
    - tparallel
    - cyclop
    - errname
    - varnamelen
    - exhaustruct
    - maintidx
74 vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md (generated, vendored, new file) @ -0,0 +1,74 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as
|
||||
contributors and maintainers pledge to making participation in our project and
|
||||
our community a harassment-free experience for everyone, regardless of age, body
|
||||
size, disability, ethnicity, gender identity and expression, level of experience,
|
||||
nationality, personal appearance, race, religion, or sexual identity and
|
||||
orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic
|
||||
address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies both within project spaces and in public spaces
|
||||
when an individual is representing the project or its community. Examples of
|
||||
representing a project or community include using an official project e-mail
|
||||
address, posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event. Representation of a project may be
|
||||
further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||
Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||
faith may face temporary or permanent repercussions as determined by other
|
||||
members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||
available at [http://contributor-covenant.org/version/1/4][version]
|
||||
|
||||
[homepage]: http://contributor-covenant.org
|
||||
[version]: http://contributor-covenant.org/version/1/4/
|
202 vendor/github.com/go-openapi/errors/LICENSE (generated, vendored, new file) @ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
11 vendor/github.com/go-openapi/errors/README.md (generated, vendored, new file) @ -0,0 +1,11 @@
# OpenAPI errors

[](https://travis-ci.org/go-openapi/errors)
[](https://codecov.io/gh/go-openapi/errors)
[](https://slackin.goswagger.io)
[](https://raw.githubusercontent.com/go-openapi/errors/master/LICENSE)
[](https://pkg.go.dev/github.com/go-openapi/errors)
[](https://golangci.com)
[](https://goreportcard.com/report/github.com/go-openapi/errors)

Shared errors and error interface used throughout the various libraries found in the go-openapi toolkit.
182 vendor/github.com/go-openapi/errors/api.go (generated, vendored, new file) @ -0,0 +1,182 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package errors

import (
	"encoding/json"
	"fmt"
	"net/http"
	"reflect"
	"strings"
)

// DefaultHTTPCode is used when the error Code cannot be used as an HTTP code.
var DefaultHTTPCode = http.StatusUnprocessableEntity

// Error represents a error interface all swagger framework errors implement
type Error interface {
	error
	Code() int32
}

type apiError struct {
	code    int32
	message string
}

func (a *apiError) Error() string {
	return a.message
}

func (a *apiError) Code() int32 {
	return a.code
}

// MarshalJSON implements the JSON encoding interface
func (a apiError) MarshalJSON() ([]byte, error) {
	return json.Marshal(map[string]interface{}{
		"code":    a.code,
		"message": a.message,
	})
}

// New creates a new API error with a code and a message
func New(code int32, message string, args ...interface{}) Error {
	if len(args) > 0 {
		return &apiError{code, fmt.Sprintf(message, args...)}
	}
	return &apiError{code, message}
}

// NotFound creates a new not found error
func NotFound(message string, args ...interface{}) Error {
	if message == "" {
		message = "Not found"
	}
	return New(http.StatusNotFound, fmt.Sprintf(message, args...))
}

// NotImplemented creates a new not implemented error
func NotImplemented(message string) Error {
	return New(http.StatusNotImplemented, message)
}

// MethodNotAllowedError represents an error for when the path matches but the method doesn't
type MethodNotAllowedError struct {
	code    int32
	Allowed []string
	message string
}

func (m *MethodNotAllowedError) Error() string {
	return m.message
}

// Code the error code
func (m *MethodNotAllowedError) Code() int32 {
	return m.code
}

// MarshalJSON implements the JSON encoding interface
func (m MethodNotAllowedError) MarshalJSON() ([]byte, error) {
	return json.Marshal(map[string]interface{}{
		"code":    m.code,
		"message": m.message,
		"allowed": m.Allowed,
	})
}

func errorAsJSON(err Error) []byte {
	//nolint:errchkjson
	b, _ := json.Marshal(struct {
		Code    int32  `json:"code"`
		Message string `json:"message"`
	}{err.Code(), err.Error()})
	return b
}

func flattenComposite(errs *CompositeError) *CompositeError {
	var res []error
	for _, er := range errs.Errors {
		switch e := er.(type) {
		case *CompositeError:
			if len(e.Errors) > 0 {
				flat := flattenComposite(e)
				if len(flat.Errors) > 0 {
					res = append(res, flat.Errors...)
				}
			}
		default:
			if e != nil {
				res = append(res, e)
			}
		}
	}
	return CompositeValidationError(res...)
}

// MethodNotAllowed creates a new method not allowed error
func MethodNotAllowed(requested string, allow []string) Error {
	msg := fmt.Sprintf("method %s is not allowed, but [%s] are", requested, strings.Join(allow, ","))
	return &MethodNotAllowedError{code: http.StatusMethodNotAllowed, Allowed: allow, message: msg}
}

// ServeError the error handler interface implementation
func ServeError(rw http.ResponseWriter, r *http.Request, err error) {
	rw.Header().Set("Content-Type", "application/json")
	switch e := err.(type) {
	case *CompositeError:
		er := flattenComposite(e)
		// strips composite errors to first element only
		if len(er.Errors) > 0 {
			ServeError(rw, r, er.Errors[0])
		} else {
			// guard against empty CompositeError (invalid construct)
			ServeError(rw, r, nil)
		}
	case *MethodNotAllowedError:
		rw.Header().Add("Allow", strings.Join(e.Allowed, ","))
		rw.WriteHeader(asHTTPCode(int(e.Code())))
		if r == nil || r.Method != http.MethodHead {
			_, _ = rw.Write(errorAsJSON(e))
		}
	case Error:
		value := reflect.ValueOf(e)
		if value.Kind() == reflect.Ptr && value.IsNil() {
			rw.WriteHeader(http.StatusInternalServerError)
			_, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, "Unknown error")))
			return
		}
		rw.WriteHeader(asHTTPCode(int(e.Code())))
		if r == nil || r.Method != http.MethodHead {
			_, _ = rw.Write(errorAsJSON(e))
		}
	case nil:
		rw.WriteHeader(http.StatusInternalServerError)
		_, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, "Unknown error")))
	default:
		rw.WriteHeader(http.StatusInternalServerError)
		if r == nil || r.Method != http.MethodHead {
			_, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, err.Error())))
		}
	}
}

func asHTTPCode(input int) int {
	if input >= 600 {
		return DefaultHTTPCode
	}
	return input
}
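A minimal, hedged sketch of how the error constructors and ServeError above might be wired into a plain net/http handler (the route, port and messages are illustrative only, not part of this diff):

package main

import (
	"net/http"

	"github.com/go-openapi/errors"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/widgets", func(rw http.ResponseWriter, r *http.Request) {
		if r.Method != http.MethodGet {
			// 405 with an Allow header; ServeError serializes it as {"code":405,...}.
			errors.ServeError(rw, r, errors.MethodNotAllowed(r.Method, []string{http.MethodGet}))
			return
		}
		// 404 built from the generic Error interface defined above.
		errors.ServeError(rw, r, errors.NotFound("widget %s not found", r.URL.Query().Get("id")))
	})
	_ = http.ListenAndServe(":8080", mux)
}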
22 vendor/github.com/go-openapi/errors/auth.go (generated, vendored, new file) @ -0,0 +1,22 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package errors

import "net/http"

// Unauthenticated returns an unauthenticated error
func Unauthenticated(scheme string) Error {
	return New(http.StatusUnauthorized, "unauthenticated for %s", scheme)
}
26 vendor/github.com/go-openapi/errors/doc.go (generated, vendored, new file) @ -0,0 +1,26 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/*
Package errors provides an Error interface and several concrete types
implementing this interface to manage API errors and JSON-schema validation
errors.

A middleware handler ServeError() is provided to serve the errors types
it defines.

It is used throughout the various go-openapi toolkit libraries
(https://github.com/go-openapi).
*/
package errors
103 vendor/github.com/go-openapi/errors/headers.go (generated, vendored, new file) @ -0,0 +1,103 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package errors

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// Validation represents a failure of a precondition
type Validation struct {
	code    int32
	Name    string
	In      string
	Value   interface{}
	message string
	Values  []interface{}
}

func (e *Validation) Error() string {
	return e.message
}

// Code the error code
func (e *Validation) Code() int32 {
	return e.code
}

// MarshalJSON implements the JSON encoding interface
func (e Validation) MarshalJSON() ([]byte, error) {
	return json.Marshal(map[string]interface{}{
		"code":    e.code,
		"message": e.message,
		"in":      e.In,
		"name":    e.Name,
		"value":   e.Value,
		"values":  e.Values,
	})
}

// ValidateName sets the name for a validation or updates it for a nested property
func (e *Validation) ValidateName(name string) *Validation {
	if name != "" {
		if e.Name == "" {
			e.Name = name
			e.message = name + e.message
		} else {
			e.Name = name + "." + e.Name
			e.message = name + "." + e.message
		}
	}
	return e
}

const (
	contentTypeFail    = `unsupported media type %q, only %v are allowed`
	responseFormatFail = `unsupported media type requested, only %v are available`
)

// InvalidContentType error for an invalid content type
func InvalidContentType(value string, allowed []string) *Validation {
	values := make([]interface{}, 0, len(allowed))
	for _, v := range allowed {
		values = append(values, v)
	}
	return &Validation{
		code:    http.StatusUnsupportedMediaType,
		Name:    "Content-Type",
		In:      "header",
		Value:   value,
		Values:  values,
		message: fmt.Sprintf(contentTypeFail, value, allowed),
	}
}

// InvalidResponseFormat error for an unacceptable response format request
func InvalidResponseFormat(value string, allowed []string) *Validation {
	values := make([]interface{}, 0, len(allowed))
	for _, v := range allowed {
		values = append(values, v)
	}
	return &Validation{
		code:    http.StatusNotAcceptable,
		Name:    "Accept",
		In:      "header",
		Value:   value,
		Values:  values,
		message: fmt.Sprintf(responseFormatFail, allowed),
	}
}
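A short, hedged sketch of how the Validation type above is typically used (the parameter name "payload" and the media types are illustrative assumptions):

package main

import (
	"fmt"

	"github.com/go-openapi/errors"
)

func main() {
	// 415 validation error for a request that sent text/plain where JSON was expected.
	v := errors.InvalidContentType("text/plain", []string{"application/json"})

	// ValidateName prefixes the failing parameter/property name onto the message.
	v = v.ValidateName("payload")
	fmt.Println(v.Code(), v.Error())
}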
50 vendor/github.com/go-openapi/errors/middleware.go (generated, vendored, new file) @ -0,0 +1,50 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package errors

import (
	"bytes"
	"fmt"
	"strings"
)

// APIVerificationFailed is an error that contains all the missing info for a mismatched section
// between the api registrations and the api spec
type APIVerificationFailed struct {
	Section              string   `json:"section,omitempty"`
	MissingSpecification []string `json:"missingSpecification,omitempty"`
	MissingRegistration  []string `json:"missingRegistration,omitempty"`
}

func (v *APIVerificationFailed) Error() string {
	buf := bytes.NewBuffer(nil)

	hasRegMissing := len(v.MissingRegistration) > 0
	hasSpecMissing := len(v.MissingSpecification) > 0

	if hasRegMissing {
		buf.WriteString(fmt.Sprintf("missing [%s] %s registrations", strings.Join(v.MissingRegistration, ", "), v.Section))
	}

	if hasRegMissing && hasSpecMissing {
		buf.WriteString("\n")
	}

	if hasSpecMissing {
		buf.WriteString(fmt.Sprintf("missing from spec file [%s] %s", strings.Join(v.MissingSpecification, ", "), v.Section))
	}

	return buf.String()
}
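A hedged sketch of constructing the APIVerificationFailed error above; the operation IDs are made up for illustration:

package main

import (
	"fmt"

	"github.com/go-openapi/errors"
)

func main() {
	// Report operations present in the spec but never registered, and vice versa.
	err := &errors.APIVerificationFailed{
		Section:              "operation",
		MissingSpecification: []string{"getWidget"},
		MissingRegistration:  []string{"deleteWidget"},
	}
	fmt.Println(err.Error())
}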
78 vendor/github.com/go-openapi/errors/parsing.go (generated, vendored, new file) @ -0,0 +1,78 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package errors

import (
	"encoding/json"
	"fmt"
)

// ParseError represents a parsing error
type ParseError struct {
	code    int32
	Name    string
	In      string
	Value   string
	Reason  error
	message string
}

func (e *ParseError) Error() string {
	return e.message
}

// Code returns the http status code for this error
func (e *ParseError) Code() int32 {
	return e.code
}

// MarshalJSON implements the JSON encoding interface
func (e ParseError) MarshalJSON() ([]byte, error) {
	var reason string
	if e.Reason != nil {
		reason = e.Reason.Error()
	}
	return json.Marshal(map[string]interface{}{
		"code":    e.code,
		"message": e.message,
		"in":      e.In,
		"name":    e.Name,
		"value":   e.Value,
		"reason":  reason,
	})
}

const (
	parseErrorTemplContent     = `parsing %s %s from %q failed, because %s`
	parseErrorTemplContentNoIn = `parsing %s from %q failed, because %s`
)

// NewParseError creates a new parse error
func NewParseError(name, in, value string, reason error) *ParseError {
	var msg string
	if in == "" {
		msg = fmt.Sprintf(parseErrorTemplContentNoIn, name, value, reason)
	} else {
		msg = fmt.Sprintf(parseErrorTemplContent, name, in, value, reason)
	}
	return &ParseError{
		code:    400,
		Name:    name,
		In:      in,
		Value:   value,
		Reason:  reason,
		message: msg,
	}
}
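A hedged sketch of wrapping a conversion failure with NewParseError above (the "limit" query parameter is an invented example):

package main

import (
	"fmt"
	"strconv"

	"github.com/go-openapi/errors"
)

func main() {
	// Wrap a strconv failure as a 400 ParseError for the "limit" query parameter.
	_, convErr := strconv.Atoi("abc")
	perr := errors.NewParseError("limit", "query", "abc", convErr)
	fmt.Println(perr.Code(), perr.Error())
}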
611 vendor/github.com/go-openapi/errors/schema.go (generated, vendored, new file) @ -0,0 +1,611 @@
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package errors
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const (
|
||||
invalidType = "%s is an invalid type name"
|
||||
typeFail = "%s in %s must be of type %s"
|
||||
typeFailWithData = "%s in %s must be of type %s: %q"
|
||||
typeFailWithError = "%s in %s must be of type %s, because: %s"
|
||||
requiredFail = "%s in %s is required"
|
||||
readOnlyFail = "%s in %s is readOnly"
|
||||
tooLongMessage = "%s in %s should be at most %d chars long"
|
||||
tooShortMessage = "%s in %s should be at least %d chars long"
|
||||
patternFail = "%s in %s should match '%s'"
|
||||
enumFail = "%s in %s should be one of %v"
|
||||
multipleOfFail = "%s in %s should be a multiple of %v"
|
||||
maxIncFail = "%s in %s should be less than or equal to %v"
|
||||
maxExcFail = "%s in %s should be less than %v"
|
||||
minIncFail = "%s in %s should be greater than or equal to %v"
|
||||
minExcFail = "%s in %s should be greater than %v"
|
||||
uniqueFail = "%s in %s shouldn't contain duplicates"
|
||||
maxItemsFail = "%s in %s should have at most %d items"
|
||||
minItemsFail = "%s in %s should have at least %d items"
|
||||
typeFailNoIn = "%s must be of type %s"
|
||||
typeFailWithDataNoIn = "%s must be of type %s: %q"
|
||||
typeFailWithErrorNoIn = "%s must be of type %s, because: %s"
|
||||
requiredFailNoIn = "%s is required"
|
||||
readOnlyFailNoIn = "%s is readOnly"
|
||||
tooLongMessageNoIn = "%s should be at most %d chars long"
|
||||
tooShortMessageNoIn = "%s should be at least %d chars long"
|
||||
patternFailNoIn = "%s should match '%s'"
|
||||
enumFailNoIn = "%s should be one of %v"
|
||||
multipleOfFailNoIn = "%s should be a multiple of %v"
|
||||
maxIncFailNoIn = "%s should be less than or equal to %v"
|
||||
maxExcFailNoIn = "%s should be less than %v"
|
||||
minIncFailNoIn = "%s should be greater than or equal to %v"
|
||||
minExcFailNoIn = "%s should be greater than %v"
|
||||
uniqueFailNoIn = "%s shouldn't contain duplicates"
|
||||
maxItemsFailNoIn = "%s should have at most %d items"
|
||||
minItemsFailNoIn = "%s should have at least %d items"
|
||||
noAdditionalItems = "%s in %s can't have additional items"
|
||||
noAdditionalItemsNoIn = "%s can't have additional items"
|
||||
tooFewProperties = "%s in %s should have at least %d properties"
|
||||
tooFewPropertiesNoIn = "%s should have at least %d properties"
|
||||
tooManyProperties = "%s in %s should have at most %d properties"
|
||||
tooManyPropertiesNoIn = "%s should have at most %d properties"
|
||||
unallowedProperty = "%s.%s in %s is a forbidden property"
|
||||
unallowedPropertyNoIn = "%s.%s is a forbidden property"
|
||||
failedAllPatternProps = "%s.%s in %s failed all pattern properties"
|
||||
failedAllPatternPropsNoIn = "%s.%s failed all pattern properties"
|
||||
multipleOfMustBePositive = "factor MultipleOf declared for %s must be positive: %v"
|
||||
)
|
||||
|
||||
// All code responses can be used to differentiate errors for different handling
|
||||
// by the consuming program
|
||||
const (
|
||||
// CompositeErrorCode remains 422 for backwards-compatibility
|
||||
// and to separate it from validation errors with cause
|
||||
CompositeErrorCode = 422
|
||||
// InvalidTypeCode is used for any subclass of invalid types
|
||||
InvalidTypeCode = 600 + iota
|
||||
RequiredFailCode
|
||||
TooLongFailCode
|
||||
TooShortFailCode
|
||||
PatternFailCode
|
||||
EnumFailCode
|
||||
MultipleOfFailCode
|
||||
MaxFailCode
|
||||
MinFailCode
|
||||
UniqueFailCode
|
||||
MaxItemsFailCode
|
||||
MinItemsFailCode
|
||||
NoAdditionalItemsCode
|
||||
TooFewPropertiesCode
|
||||
TooManyPropertiesCode
|
||||
UnallowedPropertyCode
|
||||
FailedAllPatternPropsCode
|
||||
MultipleOfMustBePositiveCode
|
||||
ReadOnlyFailCode
|
||||
)
|
||||
|
||||
// CompositeError is an error that groups several errors together
|
||||
type CompositeError struct {
|
||||
Errors []error
|
||||
code int32
|
||||
message string
|
||||
}
|
||||
|
||||
// Code for this error
|
||||
func (c *CompositeError) Code() int32 {
|
||||
return c.code
|
||||
}
|
||||
|
||||
func (c *CompositeError) Error() string {
|
||||
if len(c.Errors) > 0 {
|
||||
msgs := []string{c.message + ":"}
|
||||
for _, e := range c.Errors {
|
||||
msgs = append(msgs, e.Error())
|
||||
}
|
||||
return strings.Join(msgs, "\n")
|
||||
}
|
||||
return c.message
|
||||
}
|
||||
|
||||
// MarshalJSON implements the JSON encoding interface
|
||||
func (c CompositeError) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(map[string]interface{}{
|
||||
"code": c.code,
|
||||
"message": c.message,
|
||||
"errors": c.Errors,
|
||||
})
|
||||
}
|
||||
|
||||
// CompositeValidationError an error to wrap a bunch of other errors
|
||||
func CompositeValidationError(errors ...error) *CompositeError {
|
||||
return &CompositeError{
|
||||
code: CompositeErrorCode,
|
||||
Errors: append([]error{}, errors...),
|
||||
message: "validation failure list",
|
||||
}
|
||||
}
|
||||
|
||||
// ValidateName recursively sets the name for all validations or updates them for nested properties
|
||||
func (c *CompositeError) ValidateName(name string) *CompositeError {
|
||||
for i, e := range c.Errors {
|
||||
if ve, ok := e.(*Validation); ok {
|
||||
c.Errors[i] = ve.ValidateName(name)
|
||||
} else if ce, ok := e.(*CompositeError); ok {
|
||||
c.Errors[i] = ce.ValidateName(name)
|
||||
}
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// FailedAllPatternProperties an error for when the property doesn't match a pattern
|
||||
func FailedAllPatternProperties(name, in, key string) *Validation {
|
||||
msg := fmt.Sprintf(failedAllPatternProps, name, key, in)
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(failedAllPatternPropsNoIn, name, key)
|
||||
}
|
||||
return &Validation{
|
||||
code: FailedAllPatternPropsCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: key,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// PropertyNotAllowed an error for when the property doesn't match a pattern
|
||||
func PropertyNotAllowed(name, in, key string) *Validation {
|
||||
msg := fmt.Sprintf(unallowedProperty, name, key, in)
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(unallowedPropertyNoIn, name, key)
|
||||
}
|
||||
return &Validation{
|
||||
code: UnallowedPropertyCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: key,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// TooFewProperties an error for an object with too few properties
|
||||
func TooFewProperties(name, in string, n int64) *Validation {
|
||||
msg := fmt.Sprintf(tooFewProperties, name, in, n)
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(tooFewPropertiesNoIn, name, n)
|
||||
}
|
||||
return &Validation{
|
||||
code: TooFewPropertiesCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: n,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// TooManyProperties an error for an object with too many properties
|
||||
func TooManyProperties(name, in string, n int64) *Validation {
|
||||
msg := fmt.Sprintf(tooManyProperties, name, in, n)
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(tooManyPropertiesNoIn, name, n)
|
||||
}
|
||||
return &Validation{
|
||||
code: TooManyPropertiesCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: n,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// AdditionalItemsNotAllowed an error for invalid additional items
|
||||
func AdditionalItemsNotAllowed(name, in string) *Validation {
|
||||
msg := fmt.Sprintf(noAdditionalItems, name, in)
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(noAdditionalItemsNoIn, name)
|
||||
}
|
||||
return &Validation{
|
||||
code: NoAdditionalItemsCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// InvalidCollectionFormat another flavor of invalid type error
|
||||
func InvalidCollectionFormat(name, in, format string) *Validation {
|
||||
return &Validation{
|
||||
code: InvalidTypeCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: format,
|
||||
message: fmt.Sprintf("the collection format %q is not supported for the %s param %q", format, in, name),
|
||||
}
|
||||
}
|
||||
|
||||
// InvalidTypeName an error for when the type is invalid
|
||||
func InvalidTypeName(typeName string) *Validation {
|
||||
return &Validation{
|
||||
code: InvalidTypeCode,
|
||||
Value: typeName,
|
||||
message: fmt.Sprintf(invalidType, typeName),
|
||||
}
|
||||
}
|
||||
|
||||
// InvalidType creates an error for when the type is invalid
|
||||
func InvalidType(name, in, typeName string, value interface{}) *Validation {
|
||||
var message string
|
||||
|
||||
if in != "" {
|
||||
switch value.(type) {
|
||||
case string:
|
||||
message = fmt.Sprintf(typeFailWithData, name, in, typeName, value)
|
||||
case error:
|
||||
message = fmt.Sprintf(typeFailWithError, name, in, typeName, value)
|
||||
default:
|
||||
message = fmt.Sprintf(typeFail, name, in, typeName)
|
||||
}
|
||||
} else {
|
||||
switch value.(type) {
|
||||
case string:
|
||||
message = fmt.Sprintf(typeFailWithDataNoIn, name, typeName, value)
|
||||
case error:
|
||||
message = fmt.Sprintf(typeFailWithErrorNoIn, name, typeName, value)
|
||||
default:
|
||||
message = fmt.Sprintf(typeFailNoIn, name, typeName)
|
||||
}
|
||||
}
|
||||
|
||||
return &Validation{
|
||||
code: InvalidTypeCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: message,
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// DuplicateItems error for when an array contains duplicates
|
||||
func DuplicateItems(name, in string) *Validation {
|
||||
msg := fmt.Sprintf(uniqueFail, name, in)
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(uniqueFailNoIn, name)
|
||||
}
|
||||
return &Validation{
|
||||
code: UniqueFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// TooManyItems error for when an array contains too many items
|
||||
func TooManyItems(name, in string, max int64, value interface{}) *Validation {
|
||||
msg := fmt.Sprintf(maxItemsFail, name, in, max)
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(maxItemsFailNoIn, name, max)
|
||||
}
|
||||
|
||||
return &Validation{
|
||||
code: MaxItemsFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// TooFewItems error for when an array contains too few items
|
||||
func TooFewItems(name, in string, min int64, value interface{}) *Validation {
|
||||
msg := fmt.Sprintf(minItemsFail, name, in, min)
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(minItemsFailNoIn, name, min)
|
||||
}
|
||||
return &Validation{
|
||||
code: MinItemsFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// ExceedsMaximumInt error for when maximum validation fails
|
||||
func ExceedsMaximumInt(name, in string, max int64, exclusive bool, value interface{}) *Validation {
|
||||
var message string
|
||||
if in == "" {
|
||||
m := maxIncFailNoIn
|
||||
if exclusive {
|
||||
m = maxExcFailNoIn
|
||||
}
|
||||
message = fmt.Sprintf(m, name, max)
|
||||
} else {
|
||||
m := maxIncFail
|
||||
if exclusive {
|
||||
m = maxExcFail
|
||||
}
|
||||
message = fmt.Sprintf(m, name, in, max)
|
||||
}
|
||||
return &Validation{
|
||||
code: MaxFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: message,
|
||||
}
|
||||
}
|
||||
|
||||
// ExceedsMaximumUint error for when maximum validation fails
|
||||
func ExceedsMaximumUint(name, in string, max uint64, exclusive bool, value interface{}) *Validation {
|
||||
var message string
|
||||
if in == "" {
|
||||
m := maxIncFailNoIn
|
||||
if exclusive {
|
||||
m = maxExcFailNoIn
|
||||
}
|
||||
message = fmt.Sprintf(m, name, max)
|
||||
} else {
|
||||
m := maxIncFail
|
||||
if exclusive {
|
||||
m = maxExcFail
|
||||
}
|
||||
message = fmt.Sprintf(m, name, in, max)
|
||||
}
|
||||
return &Validation{
|
||||
code: MaxFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: message,
|
||||
}
|
||||
}
|
||||
|
||||
// ExceedsMaximum error for when maximum validation fails
|
||||
func ExceedsMaximum(name, in string, max float64, exclusive bool, value interface{}) *Validation {
|
||||
var message string
|
||||
if in == "" {
|
||||
m := maxIncFailNoIn
|
||||
if exclusive {
|
||||
m = maxExcFailNoIn
|
||||
}
|
||||
message = fmt.Sprintf(m, name, max)
|
||||
} else {
|
||||
m := maxIncFail
|
||||
if exclusive {
|
||||
m = maxExcFail
|
||||
}
|
||||
message = fmt.Sprintf(m, name, in, max)
|
||||
}
|
||||
return &Validation{
|
||||
code: MaxFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: message,
|
||||
}
|
||||
}
|
||||
|
||||
// ExceedsMinimumInt error for when minimum validation fails
|
||||
func ExceedsMinimumInt(name, in string, min int64, exclusive bool, value interface{}) *Validation {
|
||||
var message string
|
||||
if in == "" {
|
||||
m := minIncFailNoIn
|
||||
if exclusive {
|
||||
m = minExcFailNoIn
|
||||
}
|
||||
message = fmt.Sprintf(m, name, min)
|
||||
} else {
|
||||
m := minIncFail
|
||||
if exclusive {
|
||||
m = minExcFail
|
||||
}
|
||||
message = fmt.Sprintf(m, name, in, min)
|
||||
}
|
||||
return &Validation{
|
||||
code: MinFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: message,
|
||||
}
|
||||
}
|
||||
|
||||
// ExceedsMinimumUint error for when minimum validation fails
|
||||
func ExceedsMinimumUint(name, in string, min uint64, exclusive bool, value interface{}) *Validation {
|
||||
var message string
|
||||
if in == "" {
|
||||
m := minIncFailNoIn
|
||||
if exclusive {
|
||||
m = minExcFailNoIn
|
||||
}
|
||||
message = fmt.Sprintf(m, name, min)
|
||||
} else {
|
||||
m := minIncFail
|
||||
if exclusive {
|
||||
m = minExcFail
|
||||
}
|
||||
message = fmt.Sprintf(m, name, in, min)
|
||||
}
|
||||
return &Validation{
|
||||
code: MinFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: message,
|
||||
}
|
||||
}
|
||||
|
||||
// ExceedsMinimum error for when minimum validation fails
|
||||
func ExceedsMinimum(name, in string, min float64, exclusive bool, value interface{}) *Validation {
|
||||
var message string
|
||||
if in == "" {
|
||||
m := minIncFailNoIn
|
||||
if exclusive {
|
||||
m = minExcFailNoIn
|
||||
}
|
||||
message = fmt.Sprintf(m, name, min)
|
||||
} else {
|
||||
m := minIncFail
|
||||
if exclusive {
|
||||
m = minExcFail
|
||||
}
|
||||
message = fmt.Sprintf(m, name, in, min)
|
||||
}
|
||||
return &Validation{
|
||||
code: MinFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: message,
|
||||
}
|
||||
}
|
||||
|
||||
// NotMultipleOf error for when multiple of validation fails
|
||||
func NotMultipleOf(name, in string, multiple, value interface{}) *Validation {
|
||||
var msg string
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(multipleOfFailNoIn, name, multiple)
|
||||
} else {
|
||||
msg = fmt.Sprintf(multipleOfFail, name, in, multiple)
|
||||
}
|
||||
return &Validation{
|
||||
code: MultipleOfFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// EnumFail error for when an enum validation fails
|
||||
func EnumFail(name, in string, value interface{}, values []interface{}) *Validation {
|
||||
var msg string
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(enumFailNoIn, name, values)
|
||||
} else {
|
||||
msg = fmt.Sprintf(enumFail, name, in, values)
|
||||
}
|
||||
|
||||
return &Validation{
|
||||
code: EnumFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
Values: values,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// Required error for when a value is missing
|
||||
func Required(name, in string, value interface{}) *Validation {
|
||||
var msg string
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(requiredFailNoIn, name)
|
||||
} else {
|
||||
msg = fmt.Sprintf(requiredFail, name, in)
|
||||
}
|
||||
return &Validation{
|
||||
code: RequiredFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// ReadOnly error for when a value is present in request
|
||||
func ReadOnly(name, in string, value interface{}) *Validation {
|
||||
var msg string
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(readOnlyFailNoIn, name)
|
||||
} else {
|
||||
msg = fmt.Sprintf(readOnlyFail, name, in)
|
||||
}
|
||||
return &Validation{
|
||||
code: ReadOnlyFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// TooLong error for when a string is too long
|
||||
func TooLong(name, in string, max int64, value interface{}) *Validation {
|
||||
var msg string
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(tooLongMessageNoIn, name, max)
|
||||
} else {
|
||||
msg = fmt.Sprintf(tooLongMessage, name, in, max)
|
||||
}
|
||||
return &Validation{
|
||||
code: TooLongFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// TooShort error for when a string is too short
|
||||
func TooShort(name, in string, min int64, value interface{}) *Validation {
|
||||
var msg string
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(tooShortMessageNoIn, name, min)
|
||||
} else {
|
||||
msg = fmt.Sprintf(tooShortMessage, name, in, min)
|
||||
}
|
||||
|
||||
return &Validation{
|
||||
code: TooShortFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// FailedPattern error for when a string fails a regex pattern match
|
||||
// the pattern that is returned is the ECMA syntax version of the pattern not the golang version.
|
||||
func FailedPattern(name, in, pattern string, value interface{}) *Validation {
|
||||
var msg string
|
||||
if in == "" {
|
||||
msg = fmt.Sprintf(patternFailNoIn, name, pattern)
|
||||
} else {
|
||||
msg = fmt.Sprintf(patternFail, name, in, pattern)
|
||||
}
|
||||
|
||||
return &Validation{
|
||||
code: PatternFailCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: value,
|
||||
message: msg,
|
||||
}
|
||||
}
|
||||
|
||||
// MultipleOfMustBePositive error for when a
|
||||
// multipleOf factor is negative
|
||||
func MultipleOfMustBePositive(name, in string, factor interface{}) *Validation {
|
||||
return &Validation{
|
||||
code: MultipleOfMustBePositiveCode,
|
||||
Name: name,
|
||||
In: in,
|
||||
Value: factor,
|
||||
message: fmt.Sprintf(multipleOfMustBePositive, name, factor),
|
||||
}
|
||||
}
|
26
vendor/github.com/go-openapi/jsonpointer/.editorconfig
generated
vendored
Normal file
26
vendor/github.com/go-openapi/jsonpointer/.editorconfig
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
# top-most EditorConfig file
|
||||
root = true
|
||||
|
||||
# Unix-style newlines with a newline ending every file
|
||||
[*]
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
# Set default charset
|
||||
[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
|
||||
charset = utf-8
|
||||
|
||||
# Tab indentation (no size specified)
|
||||
[*.go]
|
||||
indent_style = tab
|
||||
|
||||
[*.md]
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
# Matches the exact files either package.json or .travis.yml
|
||||
[{package.json,.travis.yml}]
|
||||
indent_style = space
|
||||
indent_size = 2
|
1
vendor/github.com/go-openapi/jsonpointer/.gitignore
generated
vendored
Normal file
1
vendor/github.com/go-openapi/jsonpointer/.gitignore
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
secrets.yml
|
15
vendor/github.com/go-openapi/jsonpointer/.travis.yml
generated
vendored
Normal file
15
vendor/github.com/go-openapi/jsonpointer/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
||||
go:
|
||||
- 1.14.x
|
||||
- 1.15.x
|
||||
install:
|
||||
- GO111MODULE=off go get -u gotest.tools/gotestsum
|
||||
env:
|
||||
- GO111MODULE=on
|
||||
language: go
|
||||
notifications:
|
||||
slack:
|
||||
secure: a5VgoiwB1G/AZqzmephPZIhEB9avMlsWSlVnM1dSAtYAwdrQHGTQxAmpOxYIoSPDhWNN5bfZmjd29++UlTwLcHSR+e0kJhH6IfDlsHj/HplNCJ9tyI0zYc7XchtdKgeMxMzBKCzgwFXGSbQGydXTliDNBo0HOzmY3cou/daMFTP60K+offcjS+3LRAYb1EroSRXZqrk1nuF/xDL3792DZUdPMiFR/L/Df6y74D6/QP4sTkTDFQitz4Wy/7jbsfj8dG6qK2zivgV6/l+w4OVjFkxVpPXogDWY10vVXNVynqxfJ7to2d1I9lNCHE2ilBCkWMIPdyJF7hjF8pKW+82yP4EzRh0vu8Xn0HT5MZpQxdRY/YMxNrWaG7SxsoEaO4q5uhgdzAqLYY3TRa7MjIK+7Ur+aqOeTXn6OKwVi0CjvZ6mIU3WUKSwiwkFZMbjRAkSb5CYwMEfGFO/z964xz83qGt6WAtBXNotqCQpTIiKtDHQeLOMfksHImCg6JLhQcWBVxamVgu0G3Pdh8Y6DyPnxraXY95+QDavbjqv7TeYT9T/FNnrkXaTTK0s4iWE5H4ACU0Qvz0wUYgfQrZv0/Hp7V17+rabUwnzYySHCy9SWX/7OV9Cfh31iMp9ZIffr76xmmThtOEqs8TrTtU6BWI3rWwvA9cXQipZTVtL0oswrGw=
|
||||
script:
|
||||
- gotestsum -f short-verbose -- -race -coverprofile=coverage.txt -covermode=atomic ./...
|
74
vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md
generated
vendored
Normal file
74
vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md
generated
vendored
Normal file
@ -0,0 +1,74 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as
|
||||
contributors and maintainers pledge to making participation in our project and
|
||||
our community a harassment-free experience for everyone, regardless of age, body
|
||||
size, disability, ethnicity, gender identity and expression, level of experience,
|
||||
nationality, personal appearance, race, religion, or sexual identity and
|
||||
orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic
|
||||
address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies both within project spaces and in public spaces
|
||||
when an individual is representing the project or its community. Examples of
|
||||
representing a project or community include using an official project e-mail
|
||||
address, posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event. Representation of a project may be
|
||||
further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||
Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||
faith may face temporary or permanent repercussions as determined by other
|
||||
members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||
available at [http://contributor-covenant.org/version/1/4][version]
|
||||
|
||||
[homepage]: http://contributor-covenant.org
|
||||
[version]: http://contributor-covenant.org/version/1/4/
|
202
vendor/github.com/go-openapi/jsonpointer/LICENSE
generated
vendored
Normal file
202
vendor/github.com/go-openapi/jsonpointer/LICENSE
generated
vendored
Normal file
@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
15
vendor/github.com/go-openapi/jsonpointer/README.md
generated
vendored
Normal file
15
vendor/github.com/go-openapi/jsonpointer/README.md
generated
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
# gojsonpointer [](https://travis-ci.org/go-openapi/jsonpointer) [](https://codecov.io/gh/go-openapi/jsonpointer) [](https://slackin.goswagger.io)
|
||||
|
||||
[](https://raw.githubusercontent.com/go-openapi/jsonpointer/master/LICENSE) [](http://godoc.org/github.com/go-openapi/jsonpointer)
|
||||
An implementation of JSON Pointer - Go language
|
||||
|
||||
## Status
|
||||
Completed YES
|
||||
|
||||
Tested YES
|
||||
|
||||
## References
|
||||
http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07
|
||||
|
||||
### Note
|
||||
The 4.Evaluation part of the previous reference, starting with 'If the currently referenced value is a JSON array, the reference token MUST contain either...' is not implemented.
|
390
vendor/github.com/go-openapi/jsonpointer/pointer.go
generated
vendored
Normal file
390
vendor/github.com/go-openapi/jsonpointer/pointer.go
generated
vendored
Normal file
@ -0,0 +1,390 @@
|
||||
// Copyright 2013 sigu-399 ( https://github.com/sigu-399 )
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// author sigu-399
|
||||
// author-github https://github.com/sigu-399
|
||||
// author-mail sigu.399@gmail.com
|
||||
//
|
||||
// repository-name jsonpointer
|
||||
// repository-desc An implementation of JSON Pointer - Go language
|
||||
//
|
||||
// description Main and unique file.
|
||||
//
|
||||
// created 25-02-2013
|
||||
|
||||
package jsonpointer
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/swag"
|
||||
)
|
||||
|
||||
const (
|
||||
emptyPointer = ``
|
||||
pointerSeparator = `/`
|
||||
|
||||
invalidStart = `JSON pointer must be empty or start with a "` + pointerSeparator
|
||||
)
|
||||
|
||||
var jsonPointableType = reflect.TypeOf(new(JSONPointable)).Elem()
|
||||
var jsonSetableType = reflect.TypeOf(new(JSONSetable)).Elem()
|
||||
|
||||
// JSONPointable is an interface for structs to implement when they need to customize the
|
||||
// json pointer process
|
||||
type JSONPointable interface {
|
||||
JSONLookup(string) (interface{}, error)
|
||||
}
|
||||
|
||||
// JSONSetable is an interface for structs to implement when they need to customize the
|
||||
// json pointer process
|
||||
type JSONSetable interface {
|
||||
JSONSet(string, interface{}) error
|
||||
}
|
||||
|
||||
// New creates a new json pointer for the given string
|
||||
func New(jsonPointerString string) (Pointer, error) {
|
||||
|
||||
var p Pointer
|
||||
err := p.parse(jsonPointerString)
|
||||
return p, err
|
||||
|
||||
}
|
||||
|
||||
// Pointer the json pointer reprsentation
|
||||
type Pointer struct {
|
||||
referenceTokens []string
|
||||
}
|
||||
|
||||
// "Constructor", parses the given string JSON pointer
|
||||
func (p *Pointer) parse(jsonPointerString string) error {
|
||||
|
||||
var err error
|
||||
|
||||
if jsonPointerString != emptyPointer {
|
||||
if !strings.HasPrefix(jsonPointerString, pointerSeparator) {
|
||||
err = errors.New(invalidStart)
|
||||
} else {
|
||||
referenceTokens := strings.Split(jsonPointerString, pointerSeparator)
|
||||
for _, referenceToken := range referenceTokens[1:] {
|
||||
p.referenceTokens = append(p.referenceTokens, referenceToken)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// Get uses the pointer to retrieve a value from a JSON document
|
||||
func (p *Pointer) Get(document interface{}) (interface{}, reflect.Kind, error) {
|
||||
return p.get(document, swag.DefaultJSONNameProvider)
|
||||
}
|
||||
|
||||
// Set uses the pointer to set a value from a JSON document
|
||||
func (p *Pointer) Set(document interface{}, value interface{}) (interface{}, error) {
|
||||
return document, p.set(document, value, swag.DefaultJSONNameProvider)
|
||||
}
|
||||
|
||||
// GetForToken gets a value for a json pointer token 1 level deep
|
||||
func GetForToken(document interface{}, decodedToken string) (interface{}, reflect.Kind, error) {
|
||||
return getSingleImpl(document, decodedToken, swag.DefaultJSONNameProvider)
|
||||
}
|
||||
|
||||
// SetForToken gets a value for a json pointer token 1 level deep
|
||||
func SetForToken(document interface{}, decodedToken string, value interface{}) (interface{}, error) {
|
||||
return document, setSingleImpl(document, value, decodedToken, swag.DefaultJSONNameProvider)
|
||||
}
|
||||
|
||||
func getSingleImpl(node interface{}, decodedToken string, nameProvider *swag.NameProvider) (interface{}, reflect.Kind, error) {
|
||||
rValue := reflect.Indirect(reflect.ValueOf(node))
|
||||
kind := rValue.Kind()
|
||||
|
||||
if rValue.Type().Implements(jsonPointableType) {
|
||||
r, err := node.(JSONPointable).JSONLookup(decodedToken)
|
||||
if err != nil {
|
||||
return nil, kind, err
|
||||
}
|
||||
return r, kind, nil
|
||||
}
|
||||
|
||||
switch kind {
|
||||
case reflect.Struct:
|
||||
nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
|
||||
if !ok {
|
||||
return nil, kind, fmt.Errorf("object has no field %q", decodedToken)
|
||||
}
|
||||
fld := rValue.FieldByName(nm)
|
||||
return fld.Interface(), kind, nil
|
||||
|
||||
case reflect.Map:
|
||||
kv := reflect.ValueOf(decodedToken)
|
||||
mv := rValue.MapIndex(kv)
|
||||
|
||||
if mv.IsValid() {
|
||||
return mv.Interface(), kind, nil
|
||||
}
|
||||
return nil, kind, fmt.Errorf("object has no key %q", decodedToken)
|
||||
|
||||
case reflect.Slice:
|
||||
tokenIndex, err := strconv.Atoi(decodedToken)
|
||||
if err != nil {
|
||||
return nil, kind, err
|
||||
}
|
||||
sLength := rValue.Len()
|
||||
if tokenIndex < 0 || tokenIndex >= sLength {
|
||||
return nil, kind, fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength-1, tokenIndex)
|
||||
}
|
||||
|
||||
elem := rValue.Index(tokenIndex)
|
||||
return elem.Interface(), kind, nil
|
||||
|
||||
default:
|
||||
return nil, kind, fmt.Errorf("invalid token reference %q", decodedToken)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func setSingleImpl(node, data interface{}, decodedToken string, nameProvider *swag.NameProvider) error {
|
||||
rValue := reflect.Indirect(reflect.ValueOf(node))
|
||||
|
||||
if ns, ok := node.(JSONSetable); ok { // pointer impl
|
||||
return ns.JSONSet(decodedToken, data)
|
||||
}
|
||||
|
||||
if rValue.Type().Implements(jsonSetableType) {
|
||||
return node.(JSONSetable).JSONSet(decodedToken, data)
|
||||
}
|
||||
|
||||
switch rValue.Kind() {
|
||||
case reflect.Struct:
|
||||
nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
|
||||
if !ok {
|
||||
return fmt.Errorf("object has no field %q", decodedToken)
|
||||
}
|
||||
fld := rValue.FieldByName(nm)
|
||||
if fld.IsValid() {
|
||||
fld.Set(reflect.ValueOf(data))
|
||||
}
|
||||
return nil
|
||||
|
||||
case reflect.Map:
|
||||
kv := reflect.ValueOf(decodedToken)
|
||||
rValue.SetMapIndex(kv, reflect.ValueOf(data))
|
||||
return nil
|
||||
|
||||
case reflect.Slice:
|
||||
tokenIndex, err := strconv.Atoi(decodedToken)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sLength := rValue.Len()
|
||||
if tokenIndex < 0 || tokenIndex >= sLength {
|
||||
return fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength, tokenIndex)
|
||||
}
|
||||
|
||||
elem := rValue.Index(tokenIndex)
|
||||
if !elem.CanSet() {
|
||||
return fmt.Errorf("can't set slice index %s to %v", decodedToken, data)
|
||||
}
|
||||
elem.Set(reflect.ValueOf(data))
|
||||
return nil
|
||||
|
||||
default:
|
||||
return fmt.Errorf("invalid token reference %q", decodedToken)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func (p *Pointer) get(node interface{}, nameProvider *swag.NameProvider) (interface{}, reflect.Kind, error) {
|
||||
|
||||
if nameProvider == nil {
|
||||
nameProvider = swag.DefaultJSONNameProvider
|
||||
}
|
||||
|
||||
kind := reflect.Invalid
|
||||
|
||||
// Full document when empty
|
||||
if len(p.referenceTokens) == 0 {
|
||||
return node, kind, nil
|
||||
}
|
||||
|
||||
for _, token := range p.referenceTokens {
|
||||
|
||||
decodedToken := Unescape(token)
|
||||
|
||||
r, knd, err := getSingleImpl(node, decodedToken, nameProvider)
|
||||
if err != nil {
|
||||
return nil, knd, err
|
||||
}
|
||||
node, kind = r, knd
|
||||
|
||||
}
|
||||
|
||||
rValue := reflect.ValueOf(node)
|
||||
kind = rValue.Kind()
|
||||
|
||||
return node, kind, nil
|
||||
}
|
||||
|
||||
func (p *Pointer) set(node, data interface{}, nameProvider *swag.NameProvider) error {
|
||||
knd := reflect.ValueOf(node).Kind()
|
||||
|
||||
if knd != reflect.Ptr && knd != reflect.Struct && knd != reflect.Map && knd != reflect.Slice && knd != reflect.Array {
|
||||
return fmt.Errorf("only structs, pointers, maps and slices are supported for setting values")
|
||||
}
|
||||
|
||||
if nameProvider == nil {
|
||||
nameProvider = swag.DefaultJSONNameProvider
|
||||
}
|
||||
|
||||
// Full document when empty
|
||||
if len(p.referenceTokens) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
lastI := len(p.referenceTokens) - 1
|
||||
for i, token := range p.referenceTokens {
|
||||
isLastToken := i == lastI
|
||||
decodedToken := Unescape(token)
|
||||
|
||||
if isLastToken {
|
||||
|
||||
return setSingleImpl(node, data, decodedToken, nameProvider)
|
||||
}
|
||||
|
||||
rValue := reflect.Indirect(reflect.ValueOf(node))
|
||||
kind := rValue.Kind()
|
||||
|
||||
if rValue.Type().Implements(jsonPointableType) {
|
||||
r, err := node.(JSONPointable).JSONLookup(decodedToken)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
fld := reflect.ValueOf(r)
|
||||
if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr {
|
||||
node = fld.Addr().Interface()
|
||||
continue
|
||||
}
|
||||
node = r
|
||||
continue
|
||||
}
|
||||
|
||||
switch kind {
|
||||
case reflect.Struct:
|
||||
nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
|
||||
if !ok {
|
||||
return fmt.Errorf("object has no field %q", decodedToken)
|
||||
}
|
||||
fld := rValue.FieldByName(nm)
|
||||
if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr {
|
||||
node = fld.Addr().Interface()
|
||||
continue
|
||||
}
|
||||
node = fld.Interface()
|
||||
|
||||
case reflect.Map:
|
||||
kv := reflect.ValueOf(decodedToken)
|
||||
mv := rValue.MapIndex(kv)
|
||||
|
||||
if !mv.IsValid() {
|
||||
return fmt.Errorf("object has no key %q", decodedToken)
|
||||
}
|
||||
if mv.CanAddr() && mv.Kind() != reflect.Interface && mv.Kind() != reflect.Map && mv.Kind() != reflect.Slice && mv.Kind() != reflect.Ptr {
|
||||
node = mv.Addr().Interface()
|
||||
continue
|
||||
}
|
||||
node = mv.Interface()
|
||||
|
||||
case reflect.Slice:
|
||||
tokenIndex, err := strconv.Atoi(decodedToken)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sLength := rValue.Len()
|
||||
if tokenIndex < 0 || tokenIndex >= sLength {
|
||||
return fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength, tokenIndex)
|
||||
}
|
||||
|
||||
elem := rValue.Index(tokenIndex)
|
||||
if elem.CanAddr() && elem.Kind() != reflect.Interface && elem.Kind() != reflect.Map && elem.Kind() != reflect.Slice && elem.Kind() != reflect.Ptr {
|
||||
node = elem.Addr().Interface()
|
||||
continue
|
||||
}
|
||||
node = elem.Interface()
|
||||
|
||||
default:
|
||||
return fmt.Errorf("invalid token reference %q", decodedToken)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// DecodedTokens returns the decoded tokens
|
||||
func (p *Pointer) DecodedTokens() []string {
|
||||
result := make([]string, 0, len(p.referenceTokens))
|
||||
for _, t := range p.referenceTokens {
|
||||
result = append(result, Unescape(t))
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// IsEmpty returns true if this is an empty json pointer
|
||||
// this indicates that it points to the root document
|
||||
func (p *Pointer) IsEmpty() bool {
|
||||
return len(p.referenceTokens) == 0
|
||||
}
|
||||
|
||||
// Pointer to string representation function
|
||||
func (p *Pointer) String() string {
|
||||
|
||||
if len(p.referenceTokens) == 0 {
|
||||
return emptyPointer
|
||||
}
|
||||
|
||||
pointerString := pointerSeparator + strings.Join(p.referenceTokens, pointerSeparator)
|
||||
|
||||
return pointerString
|
||||
}
|
||||
|
||||
// Specific JSON pointer encoding here
|
||||
// ~0 => ~
|
||||
// ~1 => /
|
||||
// ... and vice versa
|
||||
|
||||
const (
|
||||
encRefTok0 = `~0`
|
||||
encRefTok1 = `~1`
|
||||
decRefTok0 = `~`
|
||||
decRefTok1 = `/`
|
||||
)
|
||||
|
||||
// Unescape unescapes a json pointer reference token string to the original representation
|
||||
func Unescape(token string) string {
|
||||
step1 := strings.Replace(token, encRefTok1, decRefTok1, -1)
|
||||
step2 := strings.Replace(step1, encRefTok0, decRefTok0, -1)
|
||||
return step2
|
||||
}
|
||||
|
||||
// Escape escapes a pointer reference token string
|
||||
func Escape(token string) string {
|
||||
step1 := strings.Replace(token, decRefTok0, encRefTok0, -1)
|
||||
step2 := strings.Replace(step1, decRefTok1, encRefTok1, -1)
|
||||
return step2
|
||||
}
|
1
vendor/github.com/go-openapi/jsonreference/.gitignore
generated
vendored
Normal file
1
vendor/github.com/go-openapi/jsonreference/.gitignore
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
secrets.yml
|
41
vendor/github.com/go-openapi/jsonreference/.golangci.yml
generated
vendored
Normal file
41
vendor/github.com/go-openapi/jsonreference/.golangci.yml
generated
vendored
Normal file
@ -0,0 +1,41 @@
|
||||
linters-settings:
|
||||
govet:
|
||||
check-shadowing: true
|
||||
golint:
|
||||
min-confidence: 0
|
||||
gocyclo:
|
||||
min-complexity: 30
|
||||
maligned:
|
||||
suggest-new: true
|
||||
dupl:
|
||||
threshold: 100
|
||||
goconst:
|
||||
min-len: 2
|
||||
min-occurrences: 4
|
||||
linters:
|
||||
enable-all: true
|
||||
disable:
|
||||
- maligned
|
||||
- lll
|
||||
- gochecknoglobals
|
||||
- godox
|
||||
- gocognit
|
||||
- whitespace
|
||||
- wsl
|
||||
- funlen
|
||||
- gochecknoglobals
|
||||
- gochecknoinits
|
||||
- scopelint
|
||||
- wrapcheck
|
||||
- exhaustivestruct
|
||||
- exhaustive
|
||||
- nlreturn
|
||||
- testpackage
|
||||
- gci
|
||||
- gofumpt
|
||||
- goerr113
|
||||
- gomnd
|
||||
- tparallel
|
||||
- nestif
|
||||
- godot
|
||||
- errorlint
|
24
vendor/github.com/go-openapi/jsonreference/.travis.yml
generated
vendored
Normal file
24
vendor/github.com/go-openapi/jsonreference/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
||||
go:
|
||||
- 1.14.x
|
||||
- 1.x
|
||||
install:
|
||||
- go get gotest.tools/gotestsum
|
||||
jobs:
|
||||
include:
|
||||
# include linting job, but only for latest go version and amd64 arch
|
||||
- go: 1.x
|
||||
arch: amd64
|
||||
install:
|
||||
go get github.com/golangci/golangci-lint/cmd/golangci-lint
|
||||
script:
|
||||
- golangci-lint run --new-from-rev master
|
||||
env:
|
||||
- GO111MODULE=on
|
||||
language: go
|
||||
notifications:
|
||||
slack:
|
||||
secure: OpQG/36F7DSF00HLm9WZMhyqFCYYyYTsVDObW226cWiR8PWYiNfLZiSEvIzT1Gx4dDjhigKTIqcLhG34CkL5iNXDjm9Yyo2RYhQPlK8NErNqUEXuBqn4RqYHW48VGhEhOyDd4Ei0E2FN5ZbgpvHgtpkdZ6XDi64r3Ac89isP9aPHXQTuv2Jog6b4/OKKiUTftLcTIst0p4Cp3gqOJWf1wnoj+IadWiECNVQT6zb47IYjtyw6+uV8iUjTzdKcRB6Zc6b4Dq7JAg1Zd7Jfxkql3hlKp4PNlRf9Cy7y5iA3G7MLyg3FcPX5z2kmcyPt2jOTRMBWUJ5zIQpOxizAcN8WsT3WWBL5KbuYK6k0PzujrIDLqdxGpNmjkkMfDBT9cKmZpm2FdW+oZgPFJP+oKmAo4u4KJz/vjiPTXgQlN5bmrLuRMCp+AwC5wkIohTqWZVPE2TK6ZSnMYcg/W39s+RP/9mJoyryAvPSpBOLTI+biCgaUCTOAZxNTWpMFc3tPYntc41WWkdKcooZ9JA5DwfcaVFyTGQ3YXz+HvX6G1z/gW0Q/A4dBi9mj2iE1xm7tRTT+4VQ2AXFvSEI1HJpfPgYnwAtwOD1v3Qm2EUHk9sCdtEDR4wVGEPIVn44GnwFMnGKx9JWppMPYwFu3SVDdHt+E+LOlhZUply11Aa+IVrT2KUQ=
|
||||
script:
|
||||
- gotestsum -f short-verbose -- -race -coverprofile=coverage.txt -covermode=atomic ./...
|
74
vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md
generated
vendored
Normal file
74
vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md
generated
vendored
Normal file
@ -0,0 +1,74 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as
|
||||
contributors and maintainers pledge to making participation in our project and
|
||||
our community a harassment-free experience for everyone, regardless of age, body
|
||||
size, disability, ethnicity, gender identity and expression, level of experience,
|
||||
nationality, personal appearance, race, religion, or sexual identity and
|
||||
orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic
|
||||
address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies both within project spaces and in public spaces
|
||||
when an individual is representing the project or its community. Examples of
|
||||
representing a project or community include using an official project e-mail
|
||||
address, posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event. Representation of a project may be
|
||||
further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||
Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||
faith may face temporary or permanent repercussions as determined by other
|
||||
members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||
available at [http://contributor-covenant.org/version/1/4][version]
|
||||
|
||||
[homepage]: http://contributor-covenant.org
|
||||
[version]: http://contributor-covenant.org/version/1/4/
|
202
vendor/github.com/go-openapi/jsonreference/LICENSE
generated
vendored
Normal file
202
vendor/github.com/go-openapi/jsonreference/LICENSE
generated
vendored
Normal file
@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
vendor/github.com/go-openapi/jsonreference/README.md (generated, vendored, new file, 15 lines)
@ -0,0 +1,15 @@
# gojsonreference [](https://travis-ci.org/go-openapi/jsonreference) [](https://codecov.io/gh/go-openapi/jsonreference) [](https://slackin.goswagger.io)

[](https://raw.githubusercontent.com/go-openapi/jsonreference/master/LICENSE) [](http://godoc.org/github.com/go-openapi/jsonreference)
An implementation of JSON Reference - Go language

## Status
Feature complete. Stable API

## Dependencies
https://github.com/go-openapi/jsonpointer

## References
http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07

http://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03
vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go (generated, vendored, new file, 63 lines)
@ -0,0 +1,63 @@
package internal

import (
	"net/url"
	"regexp"
	"strings"
)

const (
	defaultHttpPort  = ":80"
	defaultHttpsPort = ":443"
)

// Regular expressions used by the normalizations
var rxPort = regexp.MustCompile(`(:\d+)/?$`)
var rxDupSlashes = regexp.MustCompile(`/{2,}`)

// NormalizeURL will normalize the specified URL
// This was added to replace a previous call to the no longer maintained purell library:
// The call that was used looked like the following:
//  url.Parse(purell.NormalizeURL(parsed, purell.FlagsSafe|purell.FlagRemoveDuplicateSlashes))
//
// To explain all that was included in the call above, purell.FlagsSafe was really just the following:
//  - FlagLowercaseScheme
//  - FlagLowercaseHost
//  - FlagRemoveDefaultPort
//  - FlagRemoveDuplicateSlashes (and this was mixed in with the |)
func NormalizeURL(u *url.URL) {
	lowercaseScheme(u)
	lowercaseHost(u)
	removeDefaultPort(u)
	removeDuplicateSlashes(u)
}

func lowercaseScheme(u *url.URL) {
	if len(u.Scheme) > 0 {
		u.Scheme = strings.ToLower(u.Scheme)
	}
}

func lowercaseHost(u *url.URL) {
	if len(u.Host) > 0 {
		u.Host = strings.ToLower(u.Host)
	}
}

func removeDefaultPort(u *url.URL) {
	if len(u.Host) > 0 {
		scheme := strings.ToLower(u.Scheme)
		u.Host = rxPort.ReplaceAllStringFunc(u.Host, func(val string) string {
			if (scheme == "http" && val == defaultHttpPort) || (scheme == "https" && val == defaultHttpsPort) {
				return ""
			}
			return val
		})
	}
}

func removeDuplicateSlashes(u *url.URL) {
	if len(u.Path) > 0 {
		u.Path = rxDupSlashes.ReplaceAllString(u.Path, "/")
	}
}
vendor/github.com/go-openapi/jsonreference/reference.go (generated, vendored, new file, 158 lines)
@ -0,0 +1,158 @@
// Copyright 2013 sigu-399 ( https://github.com/sigu-399 )
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// author       sigu-399
// author-github  https://github.com/sigu-399
// author-mail    sigu.399@gmail.com
//
// repository-name  jsonreference
// repository-desc  An implementation of JSON Reference - Go language
//
// description    Main and unique file.
//
// created        26-02-2013

package jsonreference

import (
	"errors"
	"net/url"
	"strings"

	"github.com/go-openapi/jsonpointer"
	"github.com/go-openapi/jsonreference/internal"
)

const (
	fragmentRune = `#`
)

// New creates a new reference for the given string
func New(jsonReferenceString string) (Ref, error) {

	var r Ref
	err := r.parse(jsonReferenceString)
	return r, err

}

// MustCreateRef parses the ref string and panics when it's invalid.
// Use the New method for a version that returns an error
func MustCreateRef(ref string) Ref {
	r, err := New(ref)
	if err != nil {
		panic(err)
	}
	return r
}

// Ref represents a json reference object
type Ref struct {
	referenceURL     *url.URL
	referencePointer jsonpointer.Pointer

	HasFullURL      bool
	HasURLPathOnly  bool
	HasFragmentOnly bool
	HasFileScheme   bool
	HasFullFilePath bool
}

// GetURL gets the URL for this reference
func (r *Ref) GetURL() *url.URL {
	return r.referenceURL
}

// GetPointer gets the json pointer for this reference
func (r *Ref) GetPointer() *jsonpointer.Pointer {
	return &r.referencePointer
}

// String returns the best version of the url for this reference
func (r *Ref) String() string {

	if r.referenceURL != nil {
		return r.referenceURL.String()
	}

	if r.HasFragmentOnly {
		return fragmentRune + r.referencePointer.String()
	}

	return r.referencePointer.String()
}

// IsRoot returns true if this reference is a root document
func (r *Ref) IsRoot() bool {
	return r.referenceURL != nil &&
		!r.IsCanonical() &&
		!r.HasURLPathOnly &&
		r.referenceURL.Fragment == ""
}

// IsCanonical returns true when this pointer starts with http(s):// or file://
func (r *Ref) IsCanonical() bool {
	return (r.HasFileScheme && r.HasFullFilePath) || (!r.HasFileScheme && r.HasFullURL)
}

// "Constructor", parses the given string JSON reference
func (r *Ref) parse(jsonReferenceString string) error {

	parsed, err := url.Parse(jsonReferenceString)
	if err != nil {
		return err
	}

	internal.NormalizeURL(parsed)

	r.referenceURL = parsed
	refURL := r.referenceURL

	if refURL.Scheme != "" && refURL.Host != "" {
		r.HasFullURL = true
	} else {
		if refURL.Path != "" {
			r.HasURLPathOnly = true
		} else if refURL.RawQuery == "" && refURL.Fragment != "" {
			r.HasFragmentOnly = true
		}
	}

	r.HasFileScheme = refURL.Scheme == "file"
	r.HasFullFilePath = strings.HasPrefix(refURL.Path, "/")

	// invalid json-pointer error means url has no json-pointer fragment. simply ignore error
	r.referencePointer, _ = jsonpointer.New(refURL.Fragment)

	return nil
}

// Inherits creates a new reference from a parent and a child
// If the child cannot inherit from the parent, an error is returned
func (r *Ref) Inherits(child Ref) (*Ref, error) {
	childURL := child.GetURL()
	parentURL := r.GetURL()
	if childURL == nil {
		return nil, errors.New("child url is nil")
	}
	if parentURL == nil {
		return &child, nil
	}

	ref, err := New(parentURL.ResolveReference(childURL).String())
	if err != nil {
		return nil, err
	}
	return &ref, nil
}
vendor/github.com/go-openapi/loads/.editorconfig (generated, vendored, new file, 26 lines)
@ -0,0 +1,26 @@
# top-most EditorConfig file
root = true

# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 2
trim_trailing_whitespace = true

# Set default charset
[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
charset = utf-8

# Tab indentation (no size specified)
[*.go]
indent_style = tab

[*.md]
trim_trailing_whitespace = false

# Matches the exact files either package.json or .travis.yml
[{package.json,.travis.yml}]
indent_style = space
indent_size = 2
vendor/github.com/go-openapi/loads/.gitignore (generated, vendored, new file, 4 lines)
@ -0,0 +1,4 @@
secrets.yml
coverage.out
profile.cov
profile.out
vendor/github.com/go-openapi/loads/.golangci.yml (generated, vendored, new file, 44 lines)
@ -0,0 +1,44 @@
linters-settings:
  govet:
    check-shadowing: true
  golint:
    min-confidence: 0
  gocyclo:
    min-complexity: 30
  maligned:
    suggest-new: true
  dupl:
    threshold: 100
  goconst:
    min-len: 2
    min-occurrences: 4

linters:
  enable-all: true
  disable:
    - maligned
    - lll
    - gochecknoglobals
    - gochecknoinits
    - godox
    - gocognit
    - whitespace
    - wsl
    - funlen
    - gochecknoglobals
    - gochecknoinits
    - scopelint
    - wrapcheck
    - exhaustivestruct
    - exhaustive
    - nlreturn
    - testpackage
    - gci
    - gofumpt
    - goerr113
    - gomnd
    - tparallel
    - nestif
    - godot
    - errorlint
    - paralleltest
vendor/github.com/go-openapi/loads/.travis.yml (generated, vendored, new file, 25 lines)
@ -0,0 +1,25 @@
after_success:
- bash <(curl -s https://codecov.io/bash)
go:
- 1.16.x
- 1.x
install:
- go get gotest.tools/gotestsum
language: go
arch:
- amd64
- ppc64le
jobs:
  include:
    # include linting job, but only for latest go version and amd64 arch
    - go: 1.x
      arch: amd64
      install:
        go get github.com/golangci/golangci-lint/cmd/golangci-lint
      script:
        - golangci-lint run --new-from-rev master
notifications:
  slack:
    secure: OxkPwVp35qBTUilgWC8xykSj+sGMcj0h8IIOKD+Rflx2schZVlFfdYdyVBM+s9OqeOfvtuvnR9v1Ye2rPKAvcjWdC4LpRGUsgmItZaI6Um8Aj6+K9udCw5qrtZVfOVmRu8LieH//XznWWKdOultUuniW0MLqw5+II87Gd00RWbCGi0hk0PykHe7uK+PDA2BEbqyZ2WKKYCvfB3j+0nrFOHScXqnh0V05l2E83J4+Sgy1fsPy+1WdX58ZlNBG333ibaC1FS79XvKSmTgKRkx3+YBo97u6ZtUmJa5WZjf2OdLG3KIckGWAv6R5xgxeU31N0Ng8L332w/Edpp2O/M2bZwdnKJ8hJQikXIAQbICbr+lTDzsoNzMdEIYcHpJ5hjPbiUl3Bmd+Jnsjf5McgAZDiWIfpCKZ29tPCEkVwRsOCqkyPRMNMzHHmoja495P5jR+ODS7+J8RFg5xgcnOgpP9D4Wlhztlf5WyZMpkLxTUD+bZq2SRf50HfHFXTkfq22zPl3d1eq0yrLwh/Z/fWKkfb6SyysROL8y6s8u3dpFX1YHSg0BR6i913h4aoZw9B2BG27cafLLTwKYsp2dFo1PWl4O6u9giFJIeqwloZHLKKrwh0cBFhB7RH0I58asxkZpCH6uWjJierahmHe7iS+E6i+9oCHkOZ59hmCYNimIs3hM=
script:
- gotestsum -f short-verbose -- -race -timeout=20m -coverprofile=coverage.txt -covermode=atomic ./...
vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md (generated, vendored, new file, 74 lines)
@ -0,0 +1,74 @@
# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment
include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or
  advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
  address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]

[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/
vendor/github.com/go-openapi/loads/LICENSE (generated, vendored, new file, 202 lines)
@ -0,0 +1,202 @@
[Standard Apache License, Version 2.0 text, identical to the go-openapi/jsonreference LICENSE shown above; full text at http://www.apache.org/licenses/LICENSE-2.0]
vendor/github.com/go-openapi/loads/README.md (generated, vendored, new file, 6 lines)
@ -0,0 +1,6 @@
# Loads OAI specs [](https://travis-ci.org/go-openapi/loads) [](https://codecov.io/gh/go-openapi/loads) [](https://slackin.goswagger.io) [](https://github.com/go-openapi/loads/actions?query=workflow%3A"Go+Test")

[](https://raw.githubusercontent.com/go-openapi/loads/master/LICENSE) [](http://godoc.org/github.com/go-openapi/loads)
[](https://goreportcard.com/report/github.com/go-openapi/loads)

Loading of OAI specification documents from local or remote locations. Supports JSON and YAML documents.
vendor/github.com/go-openapi/loads/doc.go (generated, vendored, new file, 21 lines)
@ -0,0 +1,21 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/*
Package loads provides document loading methods for swagger (OAI) specifications.

It is used by other go-openapi packages to load and run analysis on local or remote spec documents.

*/
package loads
vendor/github.com/go-openapi/loads/loaders.go (generated, vendored, new file, 134 lines)
@ -0,0 +1,134 @@
package loads

import (
	"encoding/json"
	"errors"
	"net/url"

	"github.com/go-openapi/spec"
	"github.com/go-openapi/swag"
)

var (
	// Default chain of loaders, defined at the package level.
	//
	// By default this matches json and yaml documents.
	//
	// May be altered with AddLoader().
	loaders *loader
)

func init() {
	jsonLoader := &loader{
		DocLoaderWithMatch: DocLoaderWithMatch{
			Match: func(pth string) bool {
				return true
			},
			Fn: JSONDoc,
		},
	}

	loaders = jsonLoader.WithHead(&loader{
		DocLoaderWithMatch: DocLoaderWithMatch{
			Match: swag.YAMLMatcher,
			Fn:    swag.YAMLDoc,
		},
	})

	// sets the global default loader for go-openapi/spec
	spec.PathLoader = loaders.Load
}

// DocLoader represents a doc loader type
type DocLoader func(string) (json.RawMessage, error)

// DocMatcher represents a predicate to check if a loader matches
type DocMatcher func(string) bool

// DocLoaderWithMatch describes a loading function for a given extension match.
type DocLoaderWithMatch struct {
	Fn    DocLoader
	Match DocMatcher
}

// NewDocLoaderWithMatch builds a DocLoaderWithMatch to be used in load options
func NewDocLoaderWithMatch(fn DocLoader, matcher DocMatcher) DocLoaderWithMatch {
	return DocLoaderWithMatch{
		Fn:    fn,
		Match: matcher,
	}
}

type loader struct {
	DocLoaderWithMatch
	Next *loader
}

// WithHead adds a loader at the head of the current stack
func (l *loader) WithHead(head *loader) *loader {
	if head == nil {
		return l
	}
	head.Next = l
	return head
}

// WithNext adds a loader at the trail of the current stack
func (l *loader) WithNext(next *loader) *loader {
	l.Next = next
	return next
}

// Load the raw document from path
func (l *loader) Load(path string) (json.RawMessage, error) {
	_, erp := url.Parse(path)
	if erp != nil {
		return nil, erp
	}

	var lastErr error = errors.New("no loader matched") // default error if no match was found
	for ldr := l; ldr != nil; ldr = ldr.Next {
		if ldr.Match != nil && !ldr.Match(path) {
			continue
		}

		// try then move to next one if there is an error
		b, err := ldr.Fn(path)
		if err == nil {
			return b, nil
		}

		lastErr = err
	}

	return nil, lastErr
}

// JSONDoc loads a json document from either a file or a remote url
func JSONDoc(path string) (json.RawMessage, error) {
	data, err := swag.LoadFromFileOrHTTP(path)
	if err != nil {
		return nil, err
	}
	return json.RawMessage(data), nil
}

// AddLoader for a document, executed before other previously set loaders.
//
// This sets the configuration at the package level.
//
// NOTE:
//   * this updates the default loader used by github.com/go-openapi/spec
//   * since this sets package level globals, you shouln't call this concurrently
//
func AddLoader(predicate DocMatcher, load DocLoader) {
	loaders = loaders.WithHead(&loader{
		DocLoaderWithMatch: DocLoaderWithMatch{
			Match: predicate,
			Fn:    load,
		},
	})

	// sets the global default loader for go-openapi/spec
	spec.PathLoader = loaders.Load
}
vendor/github.com/go-openapi/loads/options.go (generated, vendored, new file, 61 lines)
@ -0,0 +1,61 @@
package loads

type options struct {
	loader *loader
}

func defaultOptions() *options {
	return &options{
		loader: loaders,
	}
}

func loaderFromOptions(options []LoaderOption) *loader {
	opts := defaultOptions()
	for _, apply := range options {
		apply(opts)
	}

	return opts.loader
}

// LoaderOption allows to fine-tune the spec loader behavior
type LoaderOption func(*options)

// WithDocLoader sets a custom loader for loading specs
func WithDocLoader(l DocLoader) LoaderOption {
	return func(opt *options) {
		if l == nil {
			return
		}
		opt.loader = &loader{
			DocLoaderWithMatch: DocLoaderWithMatch{
				Fn: l,
			},
		}
	}
}

// WithDocLoaderMatches sets a chain of custom loaders for loading specs
// for different extension matches.
//
// Loaders are executed in the order of provided DocLoaderWithMatch'es.
func WithDocLoaderMatches(l ...DocLoaderWithMatch) LoaderOption {
	return func(opt *options) {
		var final, prev *loader
		for _, ldr := range l {
			if ldr.Fn == nil {
				continue
			}

			if prev == nil {
				final = &loader{DocLoaderWithMatch: ldr}
				prev = final
				continue
			}

			prev = prev.WithNext(&loader{DocLoaderWithMatch: ldr})
		}
		opt.loader = final
	}
}
vendor/github.com/go-openapi/loads/spec.go (generated, vendored, new file, 266 lines)
@ -0,0 +1,266 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package loads

import (
	"bytes"
	"encoding/gob"
	"encoding/json"
	"fmt"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/spec"
	"github.com/go-openapi/swag"
)

func init() {
	gob.Register(map[string]interface{}{})
	gob.Register([]interface{}{})
}

// Document represents a swagger spec document
type Document struct {
	// specAnalyzer
	Analyzer     *analysis.Spec
	spec         *spec.Swagger
	specFilePath string
	origSpec     *spec.Swagger
	schema       *spec.Schema
	raw          json.RawMessage
	pathLoader   *loader
}

// JSONSpec loads a spec from a json document
func JSONSpec(path string, options ...LoaderOption) (*Document, error) {
	data, err := JSONDoc(path)
	if err != nil {
		return nil, err
	}
	// convert to json
	return Analyzed(data, "", options...)
}

// Embedded returns a Document based on embedded specs. No analysis is required
func Embedded(orig, flat json.RawMessage, options ...LoaderOption) (*Document, error) {
	var origSpec, flatSpec spec.Swagger
	if err := json.Unmarshal(orig, &origSpec); err != nil {
		return nil, err
	}
	if err := json.Unmarshal(flat, &flatSpec); err != nil {
		return nil, err
	}
	return &Document{
		raw:        orig,
		origSpec:   &origSpec,
		spec:       &flatSpec,
		pathLoader: loaderFromOptions(options),
	}, nil
}

// Spec loads a new spec document from a local or remote path
func Spec(path string, options ...LoaderOption) (*Document, error) {

	ldr := loaderFromOptions(options)

	b, err := ldr.Load(path)
	if err != nil {
		return nil, err
	}

	document, err := Analyzed(b, "", options...)
	if err != nil {
		return nil, err
	}

	if document != nil {
		document.specFilePath = path
		document.pathLoader = ldr
	}

	return document, err
}

// Analyzed creates a new analyzed spec document for a root json.RawMessage.
func Analyzed(data json.RawMessage, version string, options ...LoaderOption) (*Document, error) {
	if version == "" {
		version = "2.0"
	}
	if version != "2.0" {
		return nil, fmt.Errorf("spec version %q is not supported", version)
	}

	raw, err := trimData(data) // trim blanks, then convert yaml docs into json
	if err != nil {
		return nil, err
	}

	swspec := new(spec.Swagger)
	if err = json.Unmarshal(raw, swspec); err != nil {
		return nil, err
	}

	origsqspec, err := cloneSpec(swspec)
	if err != nil {
		return nil, err
	}

	d := &Document{
		Analyzer:   analysis.New(swspec),
		schema:     spec.MustLoadSwagger20Schema(),
		spec:       swspec,
		raw:        raw,
		origSpec:   origsqspec,
		pathLoader: loaderFromOptions(options),
	}

	return d, nil
}

func trimData(in json.RawMessage) (json.RawMessage, error) {
	trimmed := bytes.TrimSpace(in)
	if len(trimmed) == 0 {
		return in, nil
	}

	if trimmed[0] == '{' || trimmed[0] == '[' {
		return trimmed, nil
	}

	// assume yaml doc: convert it to json
	yml, err := swag.BytesToYAMLDoc(trimmed)
	if err != nil {
		return nil, fmt.Errorf("analyzed: %v", err)
	}

	d, err := swag.YAMLToJSON(yml)
	if err != nil {
		return nil, fmt.Errorf("analyzed: %v", err)
	}

	return d, nil
}

// Expanded expands the ref fields in the spec document and returns a new spec document
func (d *Document) Expanded(options ...*spec.ExpandOptions) (*Document, error) {

	swspec := new(spec.Swagger)
	if err := json.Unmarshal(d.raw, swspec); err != nil {
		return nil, err
	}

	var expandOptions *spec.ExpandOptions
	if len(options) > 0 {
		expandOptions = options[0]
	} else {
		expandOptions = &spec.ExpandOptions{
			RelativeBase: d.specFilePath,
		}
	}

	if expandOptions.PathLoader == nil {
		if d.pathLoader != nil {
			// use loader from Document options
			expandOptions.PathLoader = d.pathLoader.Load
		} else {
			// use package level loader
			expandOptions.PathLoader = loaders.Load
		}
	}

	if err := spec.ExpandSpec(swspec, expandOptions); err != nil {
		return nil, err
	}

	dd := &Document{
		Analyzer:     analysis.New(swspec),
		spec:         swspec,
		specFilePath: d.specFilePath,
		schema:       spec.MustLoadSwagger20Schema(),
		raw:          d.raw,
		origSpec:     d.origSpec,
	}
	return dd, nil
}

// BasePath the base path for this spec
func (d *Document) BasePath() string {
	return d.spec.BasePath
}

// Version returns the version of this spec
func (d *Document) Version() string {
	return d.spec.Swagger
}

// Schema returns the swagger 2.0 schema
func (d *Document) Schema() *spec.Schema {
	return d.schema
}

// Spec returns the swagger spec object model
func (d *Document) Spec() *spec.Swagger {
	return d.spec
}

// Host returns the host for the API
func (d *Document) Host() string {
	return d.spec.Host
}

// Raw returns the raw swagger spec as json bytes
func (d *Document) Raw() json.RawMessage {
	return d.raw
}

// OrigSpec yields the original spec
func (d *Document) OrigSpec() *spec.Swagger {
	return d.origSpec
}

// ResetDefinitions gives a shallow copy with the models reset to the original spec
func (d *Document) ResetDefinitions() *Document {
	defs := make(map[string]spec.Schema, len(d.origSpec.Definitions))
	for k, v := range d.origSpec.Definitions {
		defs[k] = v
	}

	d.spec.Definitions = defs
	return d
}

// Pristine creates a new pristine document instance based on the input data
func (d *Document) Pristine() *Document {
	dd, _ := Analyzed(d.Raw(), d.Version())
	dd.pathLoader = d.pathLoader
	return dd
}

// SpecFilePath returns the file path of the spec if one is defined
func (d *Document) SpecFilePath() string {
	return d.specFilePath
}

func cloneSpec(src *spec.Swagger) (*spec.Swagger, error) {
	var b bytes.Buffer
	if err := gob.NewEncoder(&b).Encode(src); err != nil {
		return nil, err
	}

	var dst spec.Swagger
	if err := gob.NewDecoder(&b).Decode(&dst); err != nil {
		return nil, err
	}
	return &dst, nil
}
vendor/github.com/go-openapi/runtime/.editorconfig (generated, vendored, new file, 26 lines)
@ -0,0 +1,26 @@
# top-most EditorConfig file
root = true

# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 2
trim_trailing_whitespace = true

# Set default charset
[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
charset = utf-8

# Tab indentation (no size specified)
[*.go]
indent_style = tab

[*.md]
trim_trailing_whitespace = false

# Matches the exact files either package.json or .travis.yml
[{package.json,.travis.yml}]
indent_style = space
indent_size = 2
vendor/github.com/go-openapi/runtime/.gitattributes (generated, vendored, new file, 1 line)
@ -0,0 +1 @@
*.go text eol=lf
vendor/github.com/go-openapi/runtime/.gitignore (generated, vendored, new file, 5 lines)
@ -0,0 +1,5 @@
secrets.yml
coverage.out
*.cov
*.out
playground
vendor/github.com/go-openapi/runtime/.golangci.yml (generated, vendored, new file, 44 lines)
@ -0,0 +1,44 @@
linters-settings:
  govet:
    # Using err repeatedly considered as shadowing.
    check-shadowing: false
  golint:
    min-confidence: 0
  gocyclo:
    min-complexity: 30
  maligned:
    suggest-new: true
  dupl:
    threshold: 100
  goconst:
    min-len: 2
    min-occurrences: 4
linters:
  disable:
    - maligned
    - lll
    - gochecknoglobals
    - godox
    - gocognit
    - whitespace
    - wsl
    - funlen
    - gochecknoglobals
    - gochecknoinits
    - scopelint
    - wrapcheck
    - exhaustivestruct
    - exhaustive
    - nlreturn
    - testpackage
    - gci
    - gofumpt
    - goerr113
    - gomnd
    - tparallel
    - nestif
    - godot
    - errorlint
    - noctx
    - interfacer
    - nilerr