mirror of https://github.com/kubernetes/client-go.git
synced 2025-06-26 23:17:34 +00:00

Merge pull request #52793 from nikhita/crd-validation-conversion-tests

Automatic merge from submit-queue. If you want to cherry-pick this change to another branch, please follow the instructions here: https://github.com/kubernetes/community/blob/master/contributors/devel/cherry-picks.md

apiextensions: add round trip tests for CRD schema conversion

Follow-up test for https://github.com/kubernetes/kubernetes/pull/52281. go-openapi needs to be updated to pull in https://github.com/go-openapi/spec/pull/31.

**Special notes for your reviewer**: The tests won't pass until https://github.com/kubernetes/kubernetes/pull/52281 is merged.

**Release note**:

```release-note
NONE
```

/cc @sttts

Kubernetes-commit: e339400f6f269b9071241b1c0b5f3e7f76bcf994
commit bee45f3389

Godeps/Godeps.json (generated, 160 changed lines)
@@ -112,11 +112,11 @@
 		},
 		{
 			"ImportPath": "github.com/go-openapi/spec",
-			"Rev": "6aced65f8501fe1217321abf0749d354824ba2ff"
+			"Rev": "7abd5745472fff5eb3685386d5fb8bf38683154d"
 		},
 		{
 			"ImportPath": "github.com/go-openapi/swag",
-			"Rev": "1d0bd113de87027671077d3c71eb3ac5d7dbba72"
+			"Rev": "f3f9494671f93fcff853e3c6e9e948b3eb71e590"
 		},
 		{
 			"ImportPath": "github.com/gogo/protobuf/proto",
@@ -240,15 +240,15 @@
 		},
 		{
 			"ImportPath": "github.com/mailru/easyjson/buffer",
-			"Rev": "d5b7844b561a7bc640052f1b935f7b800330d7e0"
+			"Rev": "2f5df55504ebc322e4d52d34df6a1f5b503bf26d"
 		},
 		{
 			"ImportPath": "github.com/mailru/easyjson/jlexer",
-			"Rev": "d5b7844b561a7bc640052f1b935f7b800330d7e0"
+			"Rev": "2f5df55504ebc322e4d52d34df6a1f5b503bf26d"
 		},
 		{
 			"ImportPath": "github.com/mailru/easyjson/jwriter",
-			"Rev": "d5b7844b561a7bc640052f1b935f7b800330d7e0"
+			"Rev": "2f5df55504ebc322e4d52d34df6a1f5b503bf26d"
 		},
 		{
 			"ImportPath": "github.com/peterbourgon/diskv",
@@ -376,303 +376,303 @@
 		},
 		{
 			"ImportPath": "k8s.io/api/admissionregistration/v1alpha1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/apps/v1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/apps/v1beta1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/apps/v1beta2",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/authentication/v1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/authentication/v1beta1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/authorization/v1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/authorization/v1beta1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/autoscaling/v1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/autoscaling/v2beta1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/batch/v1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/batch/v1beta1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/batch/v2alpha1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/certificates/v1beta1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/core/v1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/extensions/v1beta1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/imagepolicy/v1alpha1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/networking/v1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/policy/v1beta1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/rbac/v1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/rbac/v1alpha1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/rbac/v1beta1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/scheduling/v1alpha1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/settings/v1alpha1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/storage/v1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/api/storage/v1beta1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "8d40e6005e31a17866bebd03144e825f9e61bd52"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/api/equality",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/api/errors",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/api/meta",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/api/resource",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/apimachinery",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/apimachinery/registered",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/apis/meta/internalversion",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/apis/meta/v1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/apis/meta/v1alpha1",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/conversion",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/conversion/queryparams",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/conversion/unstructured",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/fields",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/labels",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/runtime",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/runtime/schema",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/runtime/serializer",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/runtime/serializer/json",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/runtime/serializer/protobuf",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/runtime/serializer/recognizer",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/runtime/serializer/streaming",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/runtime/serializer/versioning",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/selection",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/types",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/cache",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/clock",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/diff",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/errors",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/framer",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/httpstream",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/httpstream/spdy",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/intstr",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/json",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/mergepatch",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/net",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/remotecommand",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/runtime",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/sets",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/strategicpatch",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/validation",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/validation/field",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/wait",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/util/yaml",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/version",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/pkg/watch",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/third_party/forked/golang/json",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/third_party/forked/golang/netutil",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/apimachinery/third_party/forked/golang/reflect",
-			"Rev": "d1a0f96ae82fe9585ca19eb90ea9c27af5e5406d"
+			"Rev": "a1ea9bc10d36daefc404197d438e033927802735"
 		},
 		{
 			"ImportPath": "k8s.io/kube-openapi/pkg/common",
vendor/github.com/mailru/easyjson/buffer/pool.go (generated, vendored, 73 changed lines)
@@ -179,18 +179,25 @@ func (b *Buffer) DumpTo(w io.Writer) (written int, err error) {
 }
 
 // BuildBytes creates a single byte slice with all the contents of the buffer. Data is
-// copied if it does not fit in a single chunk.
-func (b *Buffer) BuildBytes() []byte {
+// copied if it does not fit in a single chunk. You can optionally provide one byte
+// slice as argument that it will try to reuse.
+func (b *Buffer) BuildBytes(reuse ...[]byte) []byte {
 	if len(b.bufs) == 0 {
 
 		ret := b.Buf
 		b.toPool = nil
 		b.Buf = nil
 
 		return ret
 	}
 
-	ret := make([]byte, 0, b.Size())
+	var ret []byte
+	size := b.Size()
+
+	// If we got a buffer as argument and it is big enought, reuse it.
+	if len(reuse) == 1 && cap(reuse[0]) >= size {
+		ret = reuse[0][:0]
+	} else {
+		ret = make([]byte, 0, size)
+	}
 	for _, buf := range b.bufs {
 		ret = append(ret, buf...)
 		putBuf(buf)
@@ -205,3 +212,59 @@ func (b *Buffer) BuildBytes() []byte {
 
 	return ret
 }
+
+type readCloser struct {
+	offset int
+	bufs   [][]byte
+}
+
+func (r *readCloser) Read(p []byte) (n int, err error) {
+	for _, buf := range r.bufs {
+		// Copy as much as we can.
+		x := copy(p[n:], buf[r.offset:])
+		n += x // Increment how much we filled.
+
+		// Did we empty the whole buffer?
+		if r.offset+x == len(buf) {
+			// On to the next buffer.
+			r.offset = 0
+			r.bufs = r.bufs[1:]
+
+			// We can release this buffer.
+			putBuf(buf)
+		} else {
+			r.offset += x
+		}
+
+		if n == len(p) {
+			break
+		}
+	}
+	// No buffers left or nothing read?
+	if len(r.bufs) == 0 {
+		err = io.EOF
+	}
+	return
+}
+
+func (r *readCloser) Close() error {
+	// Release all remaining buffers.
+	for _, buf := range r.bufs {
+		putBuf(buf)
+	}
+	// In case Close gets called multiple times.
+	r.bufs = nil
+
+	return nil
+}
+
+// ReadCloser creates an io.ReadCloser with all the contents of the buffer.
+func (b *Buffer) ReadCloser() io.ReadCloser {
+	ret := &readCloser{0, append(b.bufs, b.Buf)}
+
+	b.bufs = nil
+	b.toPool = nil
+	b.Buf = nil
+
+	return ret
+}
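Not part of the commit, a minimal usage sketch of the reuse argument added to Buffer.BuildBytes above (import path as vendored here): when the buffered data spans several chunks and the offered scratch slice has enough capacity, the copy lands in the scratch slice instead of a fresh allocation.

```go
package main

import (
	"fmt"

	"github.com/mailru/easyjson/buffer"
)

func main() {
	var b buffer.Buffer
	b.AppendString(`{"hello":"world"}`)

	// Scratch slice offered for reuse; BuildBytes falls back to make()
	// when the capacity is too small, and returns b.Buf directly when
	// everything fits in a single chunk.
	scratch := make([]byte, 0, 64)
	out := b.BuildBytes(scratch)
	fmt.Println(string(out))
}
```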
vendor/github.com/mailru/easyjson/jlexer/bytestostr.go (generated, vendored, new file, 24 lines)
@@ -0,0 +1,24 @@
+// This file will only be included to the build if neither
+// easyjson_nounsafe nor appengine build tag is set. See README notes
+// for more details.
+
+//+build !easyjson_nounsafe
+//+build !appengine
+
+package jlexer
+
+import (
+	"reflect"
+	"unsafe"
+)
+
+// bytesToStr creates a string pointing at the slice to avoid copying.
+//
+// Warning: the string returned by the function should be used with care, as the whole input data
+// chunk may be either blocked from being freed by GC because of a single string or the buffer.Data
+// may be garbage-collected even when the string exists.
+func bytesToStr(data []byte) string {
+	h := (*reflect.SliceHeader)(unsafe.Pointer(&data))
+	shdr := reflect.StringHeader{Data: h.Data, Len: h.Len}
+	return *(*string)(unsafe.Pointer(&shdr))
+}
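Not from the commit: a small in-package test sketch illustrating the warning in the comment above. In the unsafe build (neither the easyjson_nounsafe nor the appengine tag set), the returned string aliases the input slice, so later writes to the slice show through the string.

```go
package jlexer

import "testing"

// Hypothetical test, not part of the vendored code.
func TestBytesToStrAliasesInput(t *testing.T) {
	data := []byte("abc")
	s := bytesToStr(data)
	data[0] = 'x' // mutating the slice mutates the supposedly immutable string
	if s != "xbc" {
		t.Fatalf("expected the string to alias the slice, got %q", s)
	}
}
```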
vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go (generated, vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
+// This file is included to the build if any of the buildtags below
+// are defined. Refer to README notes for more details.
+
+//+build easyjson_nounsafe appengine
+
+package jlexer
+
+// bytesToStr creates a string normally from []byte
+//
+// Note that this method is roughly 1.5x slower than using the 'unsafe' method.
+func bytesToStr(data []byte) string {
+	return string(data)
+}
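The 1.5x figure in the comment is upstream's; one way to check it on your own workload is to run the same benchmark with and without the easyjson_nounsafe build tag. A hypothetical in-package sketch:

```go
package jlexer

import "testing"

var sink string

// Run as `go test -bench BytesToStr` for the unsafe variant and
// `go test -tags easyjson_nounsafe -bench BytesToStr` for the copying one.
func BenchmarkBytesToStr(b *testing.B) {
	data := []byte(`"a moderately long JSON string value"`)
	for i := 0; i < b.N; i++ {
		sink = bytesToStr(data)
	}
}
```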
vendor/github.com/mailru/easyjson/jlexer/lexer.go (generated, vendored, 428 changed lines)
@@ -5,12 +5,14 @@
 package jlexer
 
 import (
+	"encoding/base64"
+	"errors"
 	"fmt"
 	"io"
-	"reflect"
 	"strconv"
+	"unicode"
+	"unicode/utf16"
 	"unicode/utf8"
-	"unsafe"
 )
 
 // tokenKind determines type of a token.
@@ -45,11 +47,13 @@ type Lexer struct {
 	firstElement bool // Whether current element is the first in array or an object.
 	wantSep      byte // A comma or a colon character, which need to occur before a token.
 
-	err error // Error encountered during lexing, if any.
+	UseMultipleErrors bool          // If we want to use multiple errors.
+	fatalError        error         // Fatal error occurred during lexing. It is usually a syntax error.
+	multipleErrors    []*LexerError // Semantic errors occurred during lexing. Marshalling will be continued after finding this errors.
 }
 
-// fetchToken scans the input for the next token.
-func (r *Lexer) fetchToken() {
+// FetchToken scans the input for the next token.
+func (r *Lexer) FetchToken() {
 	r.token.kind = tokenUndef
 	r.start = r.pos
 
@@ -147,7 +151,7 @@ func (r *Lexer) fetchToken() {
 			return
 		}
 	}
-	r.err = io.EOF
+	r.fatalError = io.EOF
 	return
 }
 
@@ -199,17 +203,6 @@ func (r *Lexer) fetchFalse() {
 	}
 }
 
-// bytesToStr creates a string pointing at the slice to avoid copying.
-//
-// Warning: the string returned by the function should be used with care, as the whole input data
-// chunk may be either blocked from being freed by GC because of a single string or the buffer.Data
-// may be garbage-collected even when the string exists.
-func bytesToStr(data []byte) string {
-	h := (*reflect.SliceHeader)(unsafe.Pointer(&data))
-	shdr := reflect.StringHeader{h.Data, h.Len}
-	return *(*string)(unsafe.Pointer(&shdr))
-}
-
 // fetchNumber scans a number literal token.
 func (r *Lexer) fetchNumber() {
 	hasE := false
@@ -265,6 +258,33 @@ func findStringLen(data []byte) (hasEscapes bool, length int) {
 	return false, len(data)
 }
 
+// getu4 decodes \uXXXX from the beginning of s, returning the hex value,
+// or it returns -1.
+func getu4(s []byte) rune {
+	if len(s) < 6 || s[0] != '\\' || s[1] != 'u' {
+		return -1
+	}
+	var val rune
+	for i := 2; i < len(s) && i < 6; i++ {
+		var v byte
+		c := s[i]
+		switch c {
+		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+			v = c - '0'
+		case 'a', 'b', 'c', 'd', 'e', 'f':
+			v = c - 'a' + 10
+		case 'A', 'B', 'C', 'D', 'E', 'F':
+			v = c - 'A' + 10
+		default:
+			return -1
+		}
+
+		val <<= 4
+		val |= rune(v)
+	}
+	return val
+}
+
 // processEscape processes a single escape sequence and returns number of bytes processed.
 func (r *Lexer) processEscape(data []byte) (int, error) {
 	if len(data) < 2 {
@@ -292,39 +312,28 @@ func (r *Lexer) processEscape(data []byte) (int, error) {
 		r.token.byteValue = append(r.token.byteValue, '\t')
 		return 2, nil
 	case 'u':
-	default:
-		return 0, fmt.Errorf("syntax error")
-	}
-
-	var val rune
-
-	for i := 2; i < len(data) && i < 6; i++ {
-		var v byte
-		c = data[i]
-		switch c {
-		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
-			v = c - '0'
-		case 'a', 'b', 'c', 'd', 'e', 'f':
-			v = c - 'a' + 10
-		case 'A', 'B', 'C', 'D', 'E', 'F':
-			v = c - 'A' + 10
-		default:
-			return 0, fmt.Errorf("syntax error")
+		rr := getu4(data)
+		if rr < 0 {
+			return 0, errors.New("syntax error")
 		}
 
-		val <<= 4
-		val |= rune(v)
+		read := 6
+		if utf16.IsSurrogate(rr) {
+			rr1 := getu4(data[read:])
+			if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar {
+				read += 6
+				rr = dec
+			} else {
+				rr = unicode.ReplacementChar
+			}
+		}
+		var d [4]byte
+		s := utf8.EncodeRune(d[:], rr)
+		r.token.byteValue = append(r.token.byteValue, d[:s]...)
+		return read, nil
 	}
 
-	l := utf8.RuneLen(val)
-	if l == -1 {
-		return 0, fmt.Errorf("invalid unicode escape")
-	}
-
-	var d [4]byte
-	utf8.EncodeRune(d[:], val)
-	r.token.byteValue = append(r.token.byteValue, d[:l]...)
-	return 6, nil
+	return 0, errors.New("syntax error")
 }
 
 // fetchString scans a string literal token.
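Not part of the diff: the new surrogate-pair path can be sanity-checked with the standard library alone, since getu4 plus utf16.DecodeRune is exactly how the escaped pair \uD83D\uDE00 becomes a single rune.

```go
package main

import (
	"fmt"
	"unicode/utf16"
)

func main() {
	// The two halves of the escaped pair \uD83D\uDE00.
	hi, lo := rune(0xD83D), rune(0xDE00)
	r := utf16.DecodeRune(hi, lo)
	fmt.Printf("%U %c\n", r, r) // U+1F600 and the emoji it encodes
}
```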
@@ -368,11 +377,11 @@ func (r *Lexer) fetchString() {
 
 // scanToken scans the next token if no token is currently available in the lexer.
 func (r *Lexer) scanToken() {
-	if r.token.kind != tokenUndef || r.err != nil {
+	if r.token.kind != tokenUndef || r.fatalError != nil {
 		return
 	}
 
-	r.fetchToken()
+	r.FetchToken()
 }
 
 // consume resets the current token to allow scanning the next one.
@@ -383,20 +392,20 @@ func (r *Lexer) consume() {
 
 // Ok returns true if no error (including io.EOF) was encountered during scanning.
 func (r *Lexer) Ok() bool {
-	return r.err == nil
+	return r.fatalError == nil
 }
 
 const maxErrorContextLen = 13
 
 func (r *Lexer) errParse(what string) {
-	if r.err == nil {
+	if r.fatalError == nil {
 		var str string
 		if len(r.Data)-r.pos <= maxErrorContextLen {
 			str = string(r.Data)
 		} else {
 			str = string(r.Data[r.pos:r.pos+maxErrorContextLen-3]) + "..."
 		}
-		r.err = &LexerError{
+		r.fatalError = &LexerError{
 			Reason: what,
 			Offset: r.pos,
 			Data:   str,
@@ -409,36 +418,64 @@ func (r *Lexer) errSyntax() {
 }
 
 func (r *Lexer) errInvalidToken(expected string) {
-	if r.err == nil {
-		var str string
-		if len(r.token.byteValue) <= maxErrorContextLen {
-			str = string(r.token.byteValue)
-		} else {
-			str = string(r.token.byteValue[:maxErrorContextLen-3]) + "..."
-		}
-		r.err = &LexerError{
-			Reason: fmt.Sprintf("expected %s", expected),
-			Offset: r.pos,
-			Data:   str,
-		}
+	if r.fatalError != nil {
+		return
 	}
+	if r.UseMultipleErrors {
+		r.pos = r.start
+		r.consume()
+		r.SkipRecursive()
+		switch expected {
+		case "[":
+			r.token.delimValue = ']'
+			r.token.kind = tokenDelim
+		case "{":
+			r.token.delimValue = '}'
+			r.token.kind = tokenDelim
+		}
+		r.addNonfatalError(&LexerError{
+			Reason: fmt.Sprintf("expected %s", expected),
+			Offset: r.start,
+			Data:   string(r.Data[r.start:r.pos]),
+		})
+		return
+	}
+
+	var str string
+	if len(r.token.byteValue) <= maxErrorContextLen {
+		str = string(r.token.byteValue)
+	} else {
+		str = string(r.token.byteValue[:maxErrorContextLen-3]) + "..."
+	}
+	r.fatalError = &LexerError{
+		Reason: fmt.Sprintf("expected %s", expected),
+		Offset: r.pos,
+		Data:   str,
+	}
+}
+
+func (r *Lexer) GetPos() int {
+	return r.pos
 }
 
 // Delim consumes a token and verifies that it is the given delimiter.
 func (r *Lexer) Delim(c byte) {
 	if r.token.kind == tokenUndef && r.Ok() {
-		r.fetchToken()
+		r.FetchToken()
 	}
 
 	if !r.Ok() || r.token.delimValue != c {
+		r.consume() // errInvalidToken can change token if UseMultipleErrors is enabled.
 		r.errInvalidToken(string([]byte{c}))
+	} else {
+		r.consume()
 	}
-	r.consume()
 }
 
 // IsDelim returns true if there was no scanning error and next token is the given delimiter.
 func (r *Lexer) IsDelim(c byte) bool {
 	if r.token.kind == tokenUndef && r.Ok() {
-		r.fetchToken()
+		r.FetchToken()
 	}
 	return !r.Ok() || r.token.delimValue == c
 }
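Not from the commit: a sketch of the new multiple-errors mode, assuming the jlexer API as vendored here. With UseMultipleErrors set, a type mismatch is recorded via addNonfatalError and lexing continues instead of aborting on the first problem.

```go
package main

import (
	"fmt"

	"github.com/mailru/easyjson/jlexer"
)

func main() {
	l := jlexer.Lexer{
		Data:              []byte(`{"a": "x", "b": 5.5}`),
		UseMultipleErrors: true,
	}
	l.Delim('{')
	l.String() // key "a"
	l.WantColon()
	l.Int() // "x" is not a number: recorded nonfatally, lexing continues
	l.WantComma()
	l.String() // key "b"
	l.WantColon()
	l.Int() // 5.5 fails strconv.ParseInt: recorded as well
	l.Delim('}')

	for _, e := range l.GetNonFatalErrors() {
		fmt.Printf("offset %d: %s\n", e.Offset, e.Reason)
	}
}
```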
@@ -446,7 +483,7 @@ func (r *Lexer) IsDelim(c byte) bool {
 // Null verifies that the next token is null and consumes it.
 func (r *Lexer) Null() {
 	if r.token.kind == tokenUndef && r.Ok() {
-		r.fetchToken()
+		r.FetchToken()
 	}
 	if !r.Ok() || r.token.kind != tokenNull {
 		r.errInvalidToken("null")
|
|||||||
// IsNull returns true if the next token is a null keyword.
|
// IsNull returns true if the next token is a null keyword.
|
||||||
func (r *Lexer) IsNull() bool {
|
func (r *Lexer) IsNull() bool {
|
||||||
if r.token.kind == tokenUndef && r.Ok() {
|
if r.token.kind == tokenUndef && r.Ok() {
|
||||||
r.fetchToken()
|
r.FetchToken()
|
||||||
}
|
}
|
||||||
return r.Ok() && r.token.kind == tokenNull
|
return r.Ok() && r.token.kind == tokenNull
|
||||||
}
|
}
|
||||||
@ -465,7 +502,7 @@ func (r *Lexer) IsNull() bool {
|
|||||||
// Skip skips a single token.
|
// Skip skips a single token.
|
||||||
func (r *Lexer) Skip() {
|
func (r *Lexer) Skip() {
|
||||||
if r.token.kind == tokenUndef && r.Ok() {
|
if r.token.kind == tokenUndef && r.Ok() {
|
||||||
r.fetchToken()
|
r.FetchToken()
|
||||||
}
|
}
|
||||||
r.consume()
|
r.consume()
|
||||||
}
|
}
|
||||||
@ -476,7 +513,6 @@ func (r *Lexer) Skip() {
|
|||||||
// Note: no syntax validation is performed on the skipped data.
|
// Note: no syntax validation is performed on the skipped data.
|
||||||
func (r *Lexer) SkipRecursive() {
|
func (r *Lexer) SkipRecursive() {
|
||||||
r.scanToken()
|
r.scanToken()
|
||||||
|
|
||||||
var start, end byte
|
var start, end byte
|
||||||
|
|
||||||
if r.token.delimValue == '{' {
|
if r.token.delimValue == '{' {
|
||||||
@ -505,7 +541,7 @@ func (r *Lexer) SkipRecursive() {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
case c == '\\' && inQuotes:
|
case c == '\\' && inQuotes:
|
||||||
wasEscape = true
|
wasEscape = !wasEscape
|
||||||
continue
|
continue
|
||||||
case c == '"' && inQuotes:
|
case c == '"' && inQuotes:
|
||||||
inQuotes = wasEscape
|
inQuotes = wasEscape
|
||||||
@ -515,7 +551,11 @@ func (r *Lexer) SkipRecursive() {
|
|||||||
wasEscape = false
|
wasEscape = false
|
||||||
}
|
}
|
||||||
r.pos = len(r.Data)
|
r.pos = len(r.Data)
|
||||||
r.err = io.EOF
|
r.fatalError = &LexerError{
|
||||||
|
Reason: "EOF reached while skipping array/object or token",
|
||||||
|
Offset: r.pos,
|
||||||
|
Data: string(r.Data[r.pos:]),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Raw fetches the next item recursively as a data slice
|
// Raw fetches the next item recursively as a data slice
|
||||||
@@ -527,48 +567,107 @@ func (r *Lexer) Raw() []byte {
 	return r.Data[r.start:r.pos]
 }
 
+// IsStart returns whether the lexer is positioned at the start
+// of an input string.
+func (r *Lexer) IsStart() bool {
+	return r.pos == 0
+}
+
+// Consumed reads all remaining bytes from the input, publishing an error if
+// there is anything but whitespace remaining.
+func (r *Lexer) Consumed() {
+	if r.pos > len(r.Data) || !r.Ok() {
+		return
+	}
+
+	for _, c := range r.Data[r.pos:] {
+		if c != ' ' && c != '\t' && c != '\r' && c != '\n' {
+			r.AddError(&LexerError{
+				Reason: "invalid character '" + string(c) + "' after top-level value",
+				Offset: r.pos,
+				Data:   string(r.Data[r.pos:]),
+			})
+			return
+		}
+
+		r.pos++
+		r.start++
+	}
+}
+
+func (r *Lexer) unsafeString() (string, []byte) {
+	if r.token.kind == tokenUndef && r.Ok() {
+		r.FetchToken()
+	}
+	if !r.Ok() || r.token.kind != tokenString {
+		r.errInvalidToken("string")
+		return "", nil
+	}
+	bytes := r.token.byteValue
+	ret := bytesToStr(r.token.byteValue)
+	r.consume()
+	return ret, bytes
+}
+
 // UnsafeString returns the string value if the token is a string literal.
 //
 // Warning: returned string may point to the input buffer, so the string should not outlive
 // the input buffer. Intended pattern of usage is as an argument to a switch statement.
 func (r *Lexer) UnsafeString() string {
-	if r.token.kind == tokenUndef && r.Ok() {
-		r.fetchToken()
-	}
-	if !r.Ok() || r.token.kind != tokenString {
-		r.errInvalidToken("string")
-		return ""
-	}
-
-	ret := bytesToStr(r.token.byteValue)
-	r.consume()
+	ret, _ := r.unsafeString()
 	return ret
 }
 
+// UnsafeBytes returns the byte slice if the token is a string literal.
+func (r *Lexer) UnsafeBytes() []byte {
+	_, ret := r.unsafeString()
+	return ret
+}
+
 // String reads a string literal.
 func (r *Lexer) String() string {
 	if r.token.kind == tokenUndef && r.Ok() {
-		r.fetchToken()
+		r.FetchToken()
 	}
 	if !r.Ok() || r.token.kind != tokenString {
 		r.errInvalidToken("string")
 		return ""
-
 	}
 	ret := string(r.token.byteValue)
 	r.consume()
 	return ret
 }
 
+// Bytes reads a string literal and base64 decodes it into a byte slice.
+func (r *Lexer) Bytes() []byte {
+	if r.token.kind == tokenUndef && r.Ok() {
+		r.FetchToken()
+	}
+	if !r.Ok() || r.token.kind != tokenString {
+		r.errInvalidToken("string")
+		return nil
+	}
+	ret := make([]byte, base64.StdEncoding.DecodedLen(len(r.token.byteValue)))
+	len, err := base64.StdEncoding.Decode(ret, r.token.byteValue)
+	if err != nil {
+		r.fatalError = &LexerError{
+			Reason: err.Error(),
+		}
+		return nil
+	}
+
+	r.consume()
+	return ret[:len]
+}
+
 // Bool reads a true or false boolean keyword.
 func (r *Lexer) Bool() bool {
 	if r.token.kind == tokenUndef && r.Ok() {
-		r.fetchToken()
+		r.FetchToken()
 	}
 	if !r.Ok() || r.token.kind != tokenBool {
 		r.errInvalidToken("bool")
 		return false
-
 	}
 	ret := r.token.boolValue
 	r.consume()
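Not part of the diff: the new Lexer.Bytes reads a JSON string and base64-decodes it, mirroring how encoding/json treats []byte fields. A minimal sketch, assuming the jlexer API as vendored above:

```go
package main

import (
	"fmt"

	"github.com/mailru/easyjson/jlexer"
)

func main() {
	l := jlexer.Lexer{Data: []byte(`"aGVsbG8="`)} // base64 for "hello"
	fmt.Printf("%s\n", l.Bytes())                 // hello
}
```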
@@ -577,12 +676,11 @@ func (r *Lexer) Bool() bool {
 
 func (r *Lexer) number() string {
 	if r.token.kind == tokenUndef && r.Ok() {
-		r.fetchToken()
+		r.FetchToken()
 	}
 	if !r.Ok() || r.token.kind != tokenNumber {
 		r.errInvalidToken("number")
 		return ""
-
 	}
 	ret := bytesToStr(r.token.byteValue)
 	r.consume()
@@ -597,9 +695,11 @@ func (r *Lexer) Uint8() uint8 {
 
 	n, err := strconv.ParseUint(s, 10, 8)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   s,
+		})
 	}
 	return uint8(n)
 }
@@ -612,9 +712,11 @@ func (r *Lexer) Uint16() uint16 {
 
 	n, err := strconv.ParseUint(s, 10, 16)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   s,
+		})
 	}
 	return uint16(n)
 }
@@ -627,9 +729,11 @@ func (r *Lexer) Uint32() uint32 {
 
 	n, err := strconv.ParseUint(s, 10, 32)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   s,
+		})
 	}
 	return uint32(n)
 }
@@ -642,9 +746,11 @@ func (r *Lexer) Uint64() uint64 {
 
 	n, err := strconv.ParseUint(s, 10, 64)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   s,
+		})
 	}
 	return n
 }
@@ -661,9 +767,11 @@ func (r *Lexer) Int8() int8 {
 
 	n, err := strconv.ParseInt(s, 10, 8)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   s,
+		})
 	}
 	return int8(n)
 }
@@ -676,9 +784,11 @@ func (r *Lexer) Int16() int16 {
 
 	n, err := strconv.ParseInt(s, 10, 16)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   s,
+		})
 	}
 	return int16(n)
 }
@@ -691,9 +801,11 @@ func (r *Lexer) Int32() int32 {
 
 	n, err := strconv.ParseInt(s, 10, 32)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   s,
+		})
 	}
 	return int32(n)
 }
@@ -706,9 +818,11 @@ func (r *Lexer) Int64() int64 {
 
 	n, err := strconv.ParseInt(s, 10, 64)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   s,
+		})
 	}
 	return n
 }
@@ -718,61 +832,69 @@ func (r *Lexer) Int() int {
 }
 
 func (r *Lexer) Uint8Str() uint8 {
-	s := r.UnsafeString()
+	s, b := r.unsafeString()
 	if !r.Ok() {
 		return 0
 	}
 
 	n, err := strconv.ParseUint(s, 10, 8)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   string(b),
+		})
 	}
 	return uint8(n)
 }
 
 func (r *Lexer) Uint16Str() uint16 {
-	s := r.UnsafeString()
+	s, b := r.unsafeString()
 	if !r.Ok() {
 		return 0
 	}
 
 	n, err := strconv.ParseUint(s, 10, 16)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   string(b),
+		})
 	}
 	return uint16(n)
 }
 
 func (r *Lexer) Uint32Str() uint32 {
-	s := r.UnsafeString()
+	s, b := r.unsafeString()
 	if !r.Ok() {
 		return 0
 	}
 
 	n, err := strconv.ParseUint(s, 10, 32)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   string(b),
+		})
 	}
 	return uint32(n)
 }
 
 func (r *Lexer) Uint64Str() uint64 {
-	s := r.UnsafeString()
+	s, b := r.unsafeString()
 	if !r.Ok() {
 		return 0
 	}
 
 	n, err := strconv.ParseUint(s, 10, 64)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   string(b),
+		})
 	}
 	return n
 }
@@ -782,61 +904,69 @@ func (r *Lexer) UintStr() uint {
 }
 
 func (r *Lexer) Int8Str() int8 {
-	s := r.UnsafeString()
+	s, b := r.unsafeString()
 	if !r.Ok() {
 		return 0
 	}
 
 	n, err := strconv.ParseInt(s, 10, 8)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   string(b),
+		})
 	}
 	return int8(n)
 }
 
 func (r *Lexer) Int16Str() int16 {
-	s := r.UnsafeString()
+	s, b := r.unsafeString()
 	if !r.Ok() {
 		return 0
 	}
 
 	n, err := strconv.ParseInt(s, 10, 16)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   string(b),
+		})
 	}
 	return int16(n)
 }
 
 func (r *Lexer) Int32Str() int32 {
-	s := r.UnsafeString()
+	s, b := r.unsafeString()
 	if !r.Ok() {
 		return 0
 	}
 
 	n, err := strconv.ParseInt(s, 10, 32)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   string(b),
+		})
 	}
 	return int32(n)
 }
 
 func (r *Lexer) Int64Str() int64 {
-	s := r.UnsafeString()
+	s, b := r.unsafeString()
 	if !r.Ok() {
 		return 0
 	}
 
 	n, err := strconv.ParseInt(s, 10, 64)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   string(b),
+		})
 	}
 	return n
 }
@@ -853,9 +983,11 @@ func (r *Lexer) Float32() float32 {
 
 	n, err := strconv.ParseFloat(s, 32)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   s,
+		})
 	}
 	return float32(n)
 }
@@ -868,27 +1000,53 @@ func (r *Lexer) Float64() float64 {
 
 	n, err := strconv.ParseFloat(s, 64)
 	if err != nil {
-		r.err = &LexerError{
+		r.addNonfatalError(&LexerError{
+			Offset: r.start,
 			Reason: err.Error(),
-		}
+			Data:   s,
+		})
 	}
 	return n
 }
 
 func (r *Lexer) Error() error {
-	return r.err
+	return r.fatalError
 }
 
 func (r *Lexer) AddError(e error) {
-	if r.err == nil {
-		r.err = e
+	if r.fatalError == nil {
+		r.fatalError = e
 	}
 }
 
+func (r *Lexer) AddNonFatalError(e error) {
+	r.addNonfatalError(&LexerError{
+		Offset: r.start,
+		Data:   string(r.Data[r.start:r.pos]),
+		Reason: e.Error(),
+	})
+}
+
+func (r *Lexer) addNonfatalError(err *LexerError) {
+	if r.UseMultipleErrors {
+		// We don't want to add errors with the same offset.
+		if len(r.multipleErrors) != 0 && r.multipleErrors[len(r.multipleErrors)-1].Offset == err.Offset {
+			return
+		}
+		r.multipleErrors = append(r.multipleErrors, err)
+		return
+	}
+	r.fatalError = err
+}
+
+func (r *Lexer) GetNonFatalErrors() []*LexerError {
+	return r.multipleErrors
+}
+
 // Interface fetches an interface{} analogous to the 'encoding/json' package.
 func (r *Lexer) Interface() interface{} {
 	if r.token.kind == tokenUndef && r.Ok() {
-		r.fetchToken()
+		r.FetchToken()
 	}
 
 	if !r.Ok() {
|
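The lexer hunks above split the old single `r.err` field into `fatalError` plus a `multipleErrors` slice: the numeric `*Str` readers now report parse failures through `addNonfatalError`, and with `UseMultipleErrors` set the lexer records them and keeps scanning instead of aborting. A minimal sketch of driving the new API (the input literal is illustrative, not taken from this diff):

```go
package main

import (
	"fmt"

	"github.com/mailru/easyjson/jlexer"
)

func main() {
	// "300" overflows int8, so Int8Str records a non-fatal LexerError
	// instead of setting a fatal one when UseMultipleErrors is enabled.
	l := jlexer.Lexer{Data: []byte(`"300"`), UseMultipleErrors: true}
	_ = l.Int8Str()
	for _, e := range l.GetNonFatalErrors() {
		fmt.Printf("offset %d: %s (data %q)\n", e.Offset, e.Reason, e.Data)
	}
}
```

Note that `Error()` still reports only the fatal error, so callers opting into multiple-error mode need to check `GetNonFatalErrors()` separately.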
111
vendor/github.com/mailru/easyjson/jwriter/writer.go
generated
vendored
111
vendor/github.com/mailru/easyjson/jwriter/writer.go
generated
vendored
@ -2,6 +2,7 @@
package jwriter
package jwriter

import (
import (
"encoding/base64"
"io"
"io"
"strconv"
"strconv"
"unicode/utf8"
"unicode/utf8"
@ -9,10 +10,22 @@ import (
"github.com/mailru/easyjson/buffer"
"github.com/mailru/easyjson/buffer"
)
)

// Flags describe various encoding options. The behavior may be actually implemented in the encoder, but
// Flags field in Writer is used to set and pass them around.
type Flags int

const (
NilMapAsEmpty Flags = 1 << iota // Encode nil map as '{}' rather than 'null'.
NilSliceAsEmpty // Encode nil slice as '[]' rather than 'null'.
)

// Writer is a JSON writer.
// Writer is a JSON writer.
type Writer struct {
type Writer struct {
Error error
Flags Flags
Buffer buffer.Buffer

Error error
Buffer buffer.Buffer
NoEscapeHTML bool
}
}

// Size returns the size of the data that was written out.
// Size returns the size of the data that was written out.
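The new `Flags` bitmask and `NoEscapeHTML` switch are plain exported fields on `Writer`, set by callers (typically easyjson-generated encoders) before writing. A hedged sketch of configuring the options introduced here:

```go
package main

import "github.com/mailru/easyjson/jwriter"

func main() {
	w := jwriter.Writer{
		// Encoders that honor these flags emit {} / [] for nil maps and slices.
		Flags:        jwriter.NilMapAsEmpty | jwriter.NilSliceAsEmpty,
		NoEscapeHTML: true, // leave <, > and & unescaped in strings
	}
	w.String("<b>")
	// With NoEscapeHTML set the buffer holds "<b>"; by default it would
	// hold "\u003cb\u003e".
}
```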
@ -25,13 +38,24 @@ func (w *Writer) DumpTo(out io.Writer) (written int, err error) {
return w.Buffer.DumpTo(out)
return w.Buffer.DumpTo(out)
}
}

// BuildBytes returns writer data as a single byte slice.
// BuildBytes returns writer data as a single byte slice. You can optionally provide one byte slice
func (w *Writer) BuildBytes() ([]byte, error) {
// as argument that it will try to reuse.
func (w *Writer) BuildBytes(reuse ...[]byte) ([]byte, error) {
if w.Error != nil {
if w.Error != nil {
return nil, w.Error
return nil, w.Error
}
}

return w.Buffer.BuildBytes(), nil
return w.Buffer.BuildBytes(reuse...), nil
}

// ReadCloser returns an io.ReadCloser that can be used to read the data.
// ReadCloser also resets the buffer.
func (w *Writer) ReadCloser() (io.ReadCloser, error) {
if w.Error != nil {
return nil, w.Error
}

return w.Buffer.ReadCloser(), nil
}
}

// RawByte appends raw binary data to the buffer.
// RawByte appends raw binary data to the buffer.
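`BuildBytes` now optionally takes a caller-supplied slice for the result, and the new `ReadCloser` exposes the buffer as a stream that resets on read. A small usage sketch (names are illustrative, and whether the scratch slice is actually reused depends on the buffer internals):

```go
package main

import (
	"fmt"

	"github.com/mailru/easyjson/jwriter"
)

func main() {
	scratch := make([]byte, 0, 64) // candidate backing array, reused across serializations

	var w jwriter.Writer
	w.String("hello")
	out, err := w.BuildBytes(scratch) // may write into scratch when the result fits
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // "hello" (quoted)
}
```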
@ -44,7 +68,7 @@ func (w *Writer) RawString(s string) {
w.Buffer.AppendString(s)
w.Buffer.AppendString(s)
}
}

// RawByte appends raw binary data to the buffer or sets the error if it is given. Useful for
// Raw appends raw binary data to the buffer or sets the error if it is given. Useful for
// calling with results of MarshalJSON-like functions.
// calling with results of MarshalJSON-like functions.
func (w *Writer) Raw(data []byte, err error) {
func (w *Writer) Raw(data []byte, err error) {
switch {
switch {
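`Raw` (whose doc comment previously misnamed it `RawByte`) splices pre-encoded JSON into the output, taking the `([]byte, error)` pair straight from a `MarshalJSON`-style call. For instance, a sketch mixing it with the standard library:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/mailru/easyjson/jwriter"
)

func main() {
	var w jwriter.Writer
	w.RawString(`{"inner":`)
	w.Raw(json.Marshal(map[string]int{"a": 1})) // splices the bytes, or records the error
	w.RawString(`}`)
	out, _ := w.BuildBytes()
	fmt.Println(string(out)) // {"inner":{"a":1}}
}
```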
@ -59,6 +83,34 @@ func (w *Writer) Raw(data []byte, err error) {
}
}
}
}

// RawText encloses raw binary data in quotes and appends in to the buffer.
// Useful for calling with results of MarshalText-like functions.
func (w *Writer) RawText(data []byte, err error) {
switch {
case w.Error != nil:
return
case err != nil:
w.Error = err
case len(data) > 0:
w.String(string(data))
default:
w.RawString("null")
}
}

// Base64Bytes appends data to the buffer after base64 encoding it
func (w *Writer) Base64Bytes(data []byte) {
if data == nil {
w.Buffer.AppendString("null")
return
}
w.Buffer.AppendByte('"')
dst := make([]byte, base64.StdEncoding.EncodedLen(len(data)))
base64.StdEncoding.Encode(dst, data)
w.Buffer.AppendBytes(dst)
w.Buffer.AppendByte('"')
}

func (w *Writer) Uint8(n uint8) {
func (w *Writer) Uint8(n uint8) {
w.Buffer.EnsureSpace(3)
w.Buffer.EnsureSpace(3)
w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
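`RawText` is the `MarshalText` counterpart of `Raw` (it quotes the bytes as a JSON string), and `Base64Bytes` writes a slice the way `encoding/json` renders `[]byte`. An illustrative sketch:

```go
package main

import (
	"fmt"
	"time"

	"github.com/mailru/easyjson/jwriter"
)

func main() {
	var w jwriter.Writer
	w.RawString(`{"ts":`)
	w.RawText(time.Now().MarshalText()) // quoted RFC 3339 timestamp, or sets w.Error
	w.RawString(`,"blob":`)
	w.Base64Bytes([]byte{0xde, 0xad}) // "3q0=", matching encoding/json's []byte output
	w.RawString(`}`)
	out, _ := w.BuildBytes()
	fmt.Println(string(out))
}
```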
@ -200,6 +252,16 @@ func (w *Writer) Bool(v bool) {

const chars = "0123456789abcdef"
const chars = "0123456789abcdef"

func isNotEscapedSingleChar(c byte, escapeHTML bool) bool {
// Note: might make sense to use a table if there are more chars to escape. With 4 chars
// it benchmarks the same.
if escapeHTML {
return c != '<' && c != '>' && c != '&' && c != '\\' && c != '"' && c >= 0x20 && c < utf8.RuneSelf
} else {
return c != '\\' && c != '"' && c >= 0x20 && c < utf8.RuneSelf
}
}

func (w *Writer) String(s string) {
func (w *Writer) String(s string) {
w.Buffer.AppendByte('"')
w.Buffer.AppendByte('"')

@ -209,39 +271,32 @@ func (w *Writer) String(s string) {
p := 0 // last non-escape symbol
p := 0 // last non-escape symbol

for i := 0; i < len(s); {
for i := 0; i < len(s); {
// single-with character
c := s[i]
if c := s[i]; c < utf8.RuneSelf {
var escape byte
if isNotEscapedSingleChar(c, !w.NoEscapeHTML) {
// single-width character, no escaping is required
i++
continue
} else if c < utf8.RuneSelf {
// single-with character, need to escape
w.Buffer.AppendString(s[p:i])
switch c {
switch c {
case '\t':
case '\t':
escape = 't'
w.Buffer.AppendString(`\t`)
case '\r':
case '\r':
escape = 'r'
w.Buffer.AppendString(`\r`)
case '\n':
case '\n':
escape = 'n'
w.Buffer.AppendString(`\n`)
case '\\':
case '\\':
escape = '\\'
w.Buffer.AppendString(`\\`)
case '"':
case '"':
escape = '"'
w.Buffer.AppendString(`\"`)
case '<', '>':
// do nothing
default:
default:
if c >= 0x20 {
// no escaping is required
i++
continue
}
}
if escape != 0 {
w.Buffer.AppendString(s[p:i])
w.Buffer.AppendByte('\\')
w.Buffer.AppendByte(escape)
} else {
w.Buffer.AppendString(s[p:i])
w.Buffer.AppendString(`\u00`)
w.Buffer.AppendString(`\u00`)
w.Buffer.AppendByte(chars[c>>4])
w.Buffer.AppendByte(chars[c>>4])
w.Buffer.AppendByte(chars[c&0xf])
w.Buffer.AppendByte(chars[c&0xf])
}
}

i++
i++
p = i
p = i
continue
continue
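The rewritten `String` consults `isNotEscapedSingleChar` once per byte so unescaped runs are copied in bulk, and HTML-significant characters are escaped only while `NoEscapeHTML` is unset. A before/after sketch of the observable behavior (outputs assume the escaping rules shown above; `writeString` is a hypothetical helper):

```go
package main

import (
	"fmt"

	"github.com/mailru/easyjson/jwriter"
)

func writeString(noEscapeHTML bool, s string) string {
	w := jwriter.Writer{NoEscapeHTML: noEscapeHTML}
	w.String(s)
	out, _ := w.BuildBytes()
	return string(out)
}

func main() {
	fmt.Println(writeString(false, "a<b")) // "a\u003cb" (HTML chars escaped by default)
	fmt.Println(writeString(true, "a<b"))  // "a<b"      (escaping suppressed)
}
```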